1 Commits

Author SHA1 Message Date
sam 0c1479a3ee Upgrade to doxus 0.7.2
Build Mumble Web 2 / linux_build (push) Failing after 58s
Build Mumble Web 2 / windows_build (push) Has been cancelled
2025-12-04 22:22:35 -07:00
50 changed files with 2206 additions and 4490 deletions
-1
View File
@@ -1 +0,0 @@
target
@@ -1,27 +0,0 @@
name: Build android container
on:
workflow_dispatch:
schedule:
- cron: "0 4 * * *"
jobs:
android-release-builder-container-build:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Log in to container registry
uses: docker/login-action@v3
with:
registry: git.ohea.xyz
username: ${{ secrets.CI_REGISTRY_USER }}
password: ${{ secrets.CI_REGISTRY_PASSWORD }}
- name: Build Android builder image
shell: bash
run: |
docker pull "$(grep -m1 '^FROM' ./docker/android-release-builder.Dockerfile | awk '{print $2}')"
docker build -t git.ohea.xyz/mumble/mumble-web2/android-release-builder:latest -f ./docker/android-release-builder.Dockerfile .
docker push git.ohea.xyz/mumble/mumble-web2/android-release-builder:latest
+1 -65
View File
@@ -18,7 +18,7 @@ jobs:
run: curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
- name: Install dioxus-cli
run: cargo binstall dioxus-cli --version 0.7.3
run: cargo binstall dioxus-cli --version 0.7.2
- uses: Swatinem/rust-cache@v2
@@ -42,47 +42,6 @@ jobs:
path: target/release/mumble-web2-proxy
retention-days: 5
macos_build:
runs-on: macos
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Restore Rust cache
uses: actions/cache/restore@v4
with:
path: |
~/.cargo
./target
key: rust-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
rust-${{ runner.os }}-
- name: Install cargo binstall
run: curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
- name: Install dioxus-cli
run: cargo binstall dioxus-cli --version 0.7.3 --no-confirm
- name: Build dioxus project
run: dx bundle --platform macos --release -p mumble-web2-gui
- name: Save Rust cache
if: always()
uses: actions/cache/save@v4
with:
path: |
~/.cargo
./target
key: rust-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}
- name: Upload mumble-web2-gui Artifact
uses: https://gitea.com/actions/gitea-upload-artifact@v4
with:
name: mumble-web2-gui-macos-arm64
path: gui/dist
retention-days: 5
windows_build:
runs-on: windows
steps:
@@ -124,26 +83,3 @@ jobs:
name: mumble-web2-gui-windows
path: gui/dist
retention-days: 5
android_build:
runs-on: ubuntu-latest
container:
image: git.ohea.xyz/mumble/mumble-web2/android-release-builder:latest
steps:
- name: Checkout
uses: actions/checkout@v5
- uses: Swatinem/rust-cache@v2
- name: Build dioxus project (x86_64-linux-android)
run: dx build --platform android --target x86_64-linux-android --release -p mumble-web2-gui
- name: Build dioxus project (aarch64-linux-android)
run: dx build --platform android --target aarch64-linux-android --release -p mumble-web2-gui
- name: Upload mumble-web2-gui Android Artifact
uses: https://gitea.com/actions/gitea-upload-artifact@v4
with:
name: mumble-web2-android
path: target/dx/mumble-web2-gui/release/android/app/app/build/outputs/apk/debug/app-debug.apk
retention-days: 5
@@ -22,6 +22,6 @@ jobs:
- name: Build Windows image
shell: bash
run: |
docker pull "$(grep -m1 '^FROM' ./docker/windows-release-builder.Dockerfile | awk '{print $2}')"
docker pull "$(grep -m1 '^FROM' Dockerfile | awk '{print $2}')"
docker build -t git.ohea.xyz/mumble/mumble-web2/windows-release-builder:latest -f ./docker/windows-release-builder.Dockerfile .
docker push git.ohea.xyz/mumble/mumble-web2/windows-release-builder:latest
+1 -1
View File
@@ -6,4 +6,4 @@ server_hash.txt
proxy/bundle
/config.toml
proxy/config.toml
*_onnx.tar.gz
gui/assets/*_onnx.tar.gz
-4
View File
@@ -1,4 +0,0 @@
{
"rust-analyzer.cargo.features": ["desktop","web"],
"rust-analyzer.cargo.noDefaultFeatures": false
}
Generated
+414 -458
View File
File diff suppressed because it is too large Load Diff
+1 -1
View File
@@ -1,6 +1,6 @@
[workspace]
resolver = "2"
members = ["client", "common", "gui", "proxy", "tui"]
members = ["common", "gui", "proxy"]
[workspace.dependencies]
serde = { version = "1.0.214", features = ["derive"] }
+3 -3
View File
@@ -2,12 +2,12 @@
## Running Desktop
1. `cargo install dioxus-cli --version 0.7.1`
1. `cargo install dioxus-cli --version 0.7.2`
2. `dx run -p mumble-web2-gui --platform desktop --release`
## Running Web (development)
1. `cargo install dioxus-cli --version 0.7.1`
1. `cargo install dioxus-cli --version 0.7.2`
3. `dx serve -p mumble-web2-gui --platform web`
2. `cd docker && docker compose up`
4. connect to `https://localhost:64444`
@@ -15,7 +15,7 @@
## Running Web (with `proxy` only)
1. `cargo install dioxus-cli --version 0.7.1`
1. `cargo install dioxus-cli --version 0.7.2`
2. `dx build -p mumble-web2-gui --platform web --release`
3. `cp config.toml.example config.toml`
4. `cargo run -p mumble-web2-proxy` in the background
-146
View File
@@ -1,146 +0,0 @@
[package]
name = "mumble-web2-client"
version = "0.1.0"
edition = "2021"
[dependencies]
# Web Dependencies
# ================
wasm-bindgen = { version = "^0.2.92", optional = true }
wasm-bindgen-futures = { version = "^0.4.42", optional = true }
wasm-streams = { version = "^0.4.0", optional = true }
serde-wasm-bindgen = { version = "^0.6.5", optional = true }
js-sys = { version = "=0.3.82", optional = true }
web-sys = { version = "=0.3.82", features = [
"WebTransport",
"console",
"WebTransportOptions",
"WebTransportBidirectionalStream",
"WebTransportSendStream",
"WebTransportReceiveStream",
"Navigator",
"MediaDevices",
"AudioDecoder",
"AudioDecoderInit",
"AudioData",
"AudioEncoderConfig",
"AudioDecoderConfig",
"EncodedAudioChunk",
"EncodedAudioChunkInit",
"EncodedAudioChunkType",
"CodecState",
"AudioContext",
"AudioContextOptions",
"MediaStream",
"GainNode",
"MediaStreamAudioSourceNode",
"BaseAudioContext",
"AudioDestinationNode",
"AudioWorkletNode",
"AudioWorklet",
"AudioWorkletProcessor",
"MessagePort",
"MediaStreamConstraints",
"WorkletOptions",
"AudioEncoder",
"AudioEncoderInit",
"AudioDataInit",
"HtmlAnchorElement",
"Url",
"Blob",
"AudioDataCopyToOptions",
"AudioSampleFormat",
"Storage",
], optional = true }
gloo-timers = { version = "^0.3.0", features = ["futures"], optional = true }
tracing-web = { version = "^0.1.3", optional = true }
# Desktop Dependecies
# ===================
tokio = { version = "^1.41.1", features = ["net", "rt"], optional = true }
tokio-rustls = { version = "^0.26.0", optional = true }
opus = { version = "0.3.0", optional = true }
cpal = { version = "0.15.3", optional = true }
dasp_ring_buffer = { version = "0.11.0", optional = true }
etcetera = { version = "0.10.0", optional = true }
# Base Dependencies
# ================
manganis = "0.7.2"
once_cell = "1.19.0"
asynchronous-codec = { workspace = true }
futures = "^0.3.30"
merge-io = "^0.3.0"
mumble-protocol = { workspace = true }
serde_json = "1"
tokio-util = { version = "^0.7.11", features = ["codec", "compat"] }
byteorder = "1.5"
ogg = "^0.9.1"
ordermap = "^0.5.3"
html-purifier = "^0.3.0"
markdown = "^0.3.0"
futures-channel = "^0.3.30"
mumble-web2-common = { workspace = true }
serde = { workspace = true }
tracing-subscriber = { version = "^0.3.18", features = ["ansi"] }
tracing = "^0.1.40"
color-eyre = "^0.6.3"
crossbeam-queue = "^0.3.11"
lol_html = "^2.2.0"
base64 = "^0.22"
mime_guess = "^2.0.5"
async_cell = "^0.2.3"
reqwest = { version = "^0.12.22", features = ["json"] }
dioxus-asset-resolver = "0.7.2"
# Denoising
# =========
deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = [
"tract",
] }
crossbeam = "0.8.4"
# Android dependencies for requesting permissions
[target.'cfg(target_os = "android")'.dependencies]
android-permissions = "0.1.2"
jni = "0.21.1"
ndk-context = "0.1.1"
[patch.crates-io]
tract-hir = "=0.12.4"
tract-core = "=0.12.4"
tract-onnx = "=0.12.4"
tract-pulse = "=0.12.4"
[features]
embed-denoiser = []
web = [
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"serde-wasm-bindgen",
"js-sys",
"web-sys",
"gloo-timers",
"tracing-web",
"deep_filter/wasm",
]
desktop = [
"tokio",
"tokio-rustls",
"tracing-subscriber/env-filter",
"opus",
"cpal",
"dasp_ring_buffer",
"etcetera",
"mumble-web2-common/networking",
]
mobile = [
"tokio",
"tokio-rustls",
"tracing-subscriber/env-filter",
"opus",
"cpal",
"dasp_ring_buffer",
"mumble-web2-common/networking",
]
-86
View File
@@ -1,86 +0,0 @@
use std::env;
use std::path::Path;
use std::process::Command;
fn version_env() -> Option<()> {
if env::var("MUMBLE_WEB2_VERSION").is_ok() {
return Some(());
}
let output = Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.ok()?;
let git_hash = String::from_utf8(output.stdout).ok()?;
let git_hash = git_hash.trim(); // drop trailing newline
let status = Command::new("git")
.args(["status", "--porcelain"])
.output()
.ok()?;
let dirty = match status.stdout.is_empty() {
true => "",
false => "-dirty",
};
// Expose it as a compile-time env var
println!("cargo::rustc-env=MUMBLE_WEB2_VERSION=git-{git_hash}{dirty}");
// Optional: rebuild when HEAD changes
println!("cargo::rerun-if-changed=.git/HEAD");
Some(())
}
fn download_deepfilternet() {
// Define the target directory and file
let assets_dir = "assets";
let target_file = format!("{}/DeepFilterNet3_ll_onnx.tar.gz", assets_dir);
let target_path = Path::new(&target_file);
// Check if the file already exists
if target_path.exists() {
println!(
"cargo::warning=DeepFilterNet model already exists at {}",
target_file
);
return;
}
println!(
"cargo::warning=Downloading DeepFilterNet model to {}...",
target_file
);
// Download the file using curl
let url = "https://github.com/Rikorose/DeepFilterNet/raw/refs/heads/main/models/DeepFilterNet3_ll_onnx.tar.gz";
let status = Command::new("curl")
.args([
"-L", // Follow redirects
"-o",
&target_file, // Output file
url,
])
.status()
.expect("Failed to execute curl command. Make sure curl is installed.");
if !status.success() {
println!("cargo::error=Failed to download DeepFilterNet model from {url}");
return;
}
println!(
"cargo::warning=Successfully downloaded DeepFilterNet model to {}",
target_file
);
// Rerun this build script if the target file is deleted
println!("cargo::rerun-if-changed={}", target_file);
}
fn main() {
version_env();
download_deepfilternet();
}
-223
View File
@@ -1,223 +0,0 @@
use mime_guess::Mime;
use mumble_web2_common::ProxyOverrides;
use ordermap::OrderSet;
use std::collections::{HashMap, HashSet};
use std::ops::{Deref, DerefMut};
use std::{fmt, sync::Arc};
pub type ChannelId = u32;
pub type UserId = u32;
#[derive(Debug)]
pub enum ConnectionState {
Disconnected,
Connecting,
Connected,
Failed(String),
}
#[derive(Debug, Clone)]
pub struct AudioSettings {
pub denoise: bool,
}
#[derive(Debug)]
pub enum Command {
Connect {
address: String,
username: String,
config: ProxyOverrides,
},
SendChat {
markdown: String,
channels: Vec<ChannelId>,
},
SendFile {
bytes: Vec<u8>,
name: String,
mime: Option<Mime>,
channels: Vec<ChannelId>,
},
SetMute {
mute: bool,
},
SetDeaf {
deaf: bool,
},
EnterChannel {
channel: ChannelId,
user: UserId,
},
UpdateAudioSettings(AudioSettings),
Disconnect,
}
#[derive(Default, Debug)]
pub struct UserState {
pub name: String,
pub channel: ChannelId,
pub deaf: bool,
pub mute: bool,
pub suppress: bool,
pub self_deaf: bool,
pub self_mute: bool,
}
#[derive(Debug)]
pub struct Chat {
pub raw: String,
pub dangerous_html: String,
pub sender: Option<UserId>,
}
#[derive(Default, Debug)]
pub struct ChannelState {
pub name: String,
pub children: OrderSet<ChannelId>,
pub users: OrderSet<UserId>,
pub parent: Option<ChannelId>,
pub position: i32,
}
impl ChannelState {
pub fn update_from_channel_state(
&mut self,
channel_state: &mumble_protocol::control::msgs::ChannelState,
) {
if channel_state.has_position() {
self.position = channel_state.get_position();
}
if channel_state.has_parent() {
self.parent = Some(channel_state.get_parent());
}
if channel_state.has_name() {
self.name = channel_state.get_name().to_string();
}
}
}
#[derive(Default, Debug)]
pub struct ChannelsState {
pub channels: HashMap<ChannelId, ChannelState>,
}
impl ChannelsState {
pub fn update_from_channel_state(
&mut self,
channel_state: &mumble_protocol::control::msgs::ChannelState,
) {
self.channels
.entry(channel_state.get_channel_id())
.or_default()
.update_from_channel_state(channel_state);
self.update_channel_parents();
}
pub fn update_from_channel_remove(
&mut self,
channel_remove: &mumble_protocol::control::msgs::ChannelRemove,
) {
self.channels.remove(&channel_remove.get_channel_id());
self.update_channel_parents();
}
pub fn update_channel_parents(&mut self) {
// Zero out existing children
for state in self.channels.values_mut() {
state.children.clear();
}
let mut to_sort: Vec<(ChannelId, Option<ChannelId>, i32, String)> = Vec::new();
for (id, state) in self.channels.iter() {
// Handle channels with no parent (the root channel)
let Some(parent_id) = state.parent else {
to_sort.push((*id, None, 0, state.name.clone()));
continue;
};
// If a channel has a parent that we haven't gotten a channel
// state packet for, ignore it
if !self.channels.contains_key(&parent_id) {
continue;
}
to_sort.push((*id, Some(parent_id), state.position, state.name.clone()));
}
let pos_name: HashMap<ChannelId, (i32, String)> = self
.channels
.iter()
.map(|(&id, state)| (id, (state.position, state.name.clone())))
.collect();
let mut updated: HashSet<ChannelId> = HashSet::new();
while updated.len() < to_sort.len() {
for &(id, ref parent_id, position, ref name) in &to_sort {
let Some(parent_id) = parent_id else {
updated.insert(id);
continue;
};
if updated.contains(&id) || !updated.contains(&parent_id) {
continue;
}
// Unwrap should never fail here since we pre filter
let parent = self.channels.get_mut(&parent_id).unwrap();
let mut insert_index = parent.children.len();
for (i, &child) in parent.children.iter().enumerate() {
let (p, ref n) = pos_name[&child];
if (position == p && name < n) || p > position {
insert_index = i;
break;
}
}
parent.children.insert_before(insert_index, id);
updated.insert(id);
}
}
}
}
#[derive(Default, Debug)]
pub struct ServerState {
pub channels_state: ChannelsState,
pub users: HashMap<UserId, UserState>,
pub chat: Vec<Chat>,
pub session: Option<UserId>,
}
impl ServerState {
pub fn this_user(&self) -> Option<&UserState> {
self.users.get(&self.session?)
}
}
pub trait Reactivity {
type Signal<T>;
fn new<T: 'static>(value: T) -> Self::Signal<T>;
fn read<T: 'static>(signal: &Self::Signal<T>) -> impl Deref<Target = T>;
fn write<T: 'static>(signal: &Self::Signal<T>) -> impl DerefMut<Target = T>;
}
pub struct State<R: Reactivity> {
pub status: R::Signal<ConnectionState>,
pub server: R::Signal<ServerState>,
pub audio: R::Signal<AudioSettings>,
}
impl<R: Reactivity> fmt::Debug for State<R> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("State")
.field("status", &*R::read(&self.status))
.field("server", &*R::read(&self.server))
.finish()
}
}
pub type SharedState<R> = Arc<State<R>>;
-91
View File
@@ -1,91 +0,0 @@
use crate::app::Command;
use color_eyre::eyre::Error;
use dioxus::hooks::UnboundedReceiver;
use mumble_web2_common::{ClientConfig, ServerStatus};
use std::future::Future;
use std::time::Duration;
/// Mobile platform implementation using Tokio, native audio, and Android permissions.
pub struct MobilePlatform;
impl super::PlatformInterface for MobilePlatform {
type AudioSystem = super::native_audio::NativeAudioSystem;
async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
fn load_username() -> Option<String> {
None
}
fn load_server_url() -> Option<String> {
None
}
fn set_default_username(_username: &str) -> Option<()> {
None
}
fn set_default_server(server: &str) -> Option<()> {
None
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> Result<(), Error> {
super::connect::network_connect(address, username, event_rx, gui_config).await
}
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
super::connect::get_status(client).await
}
fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
fn request_permissions() {
request_recording_permission();
}
async fn sleep(duration: Duration) {
tokio::time::sleep(duration).await;
}
}
#[cfg(not(target_os = "android"))]
pub fn request_recording_permission() {}
#[cfg(target_os = "android")]
pub fn request_recording_permission() {
use android_permissions::{PermissionManager, RECORD_AUDIO};
use jni::{objects::JObject, JavaVM};
let ctx = ndk_context::android_context();
let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() };
let activity = unsafe { JObject::from_raw(ctx.context().cast()) };
let manager = PermissionManager::create(vm, activity).unwrap();
if !manager.check(&RECORD_AUDIO).unwrap() {
manager.request(&[&RECORD_AUDIO]).unwrap();
}
}
-115
View File
@@ -1,115 +0,0 @@
use crate::app::{Command, SharedState};
use crate::Reactivity;
use color_eyre::eyre::Error;
use futures_channel::mpsc::UnboundedReceiver;
use mumble_protocol::control::ClientControlCodec;
use std::net::ToSocketAddrs;
use std::sync::Arc;
use tokio::net::TcpStream;
use tokio_rustls::rustls;
use tokio_rustls::rustls::client::danger::{HandshakeSignatureValid, ServerCertVerifier};
use tokio_rustls::rustls::pki_types::{CertificateDer, ServerName, UnixTime};
use tokio_rustls::rustls::ClientConfig;
use tokio_rustls::rustls::DigitallySignedStruct;
use tokio_rustls::TlsConnector;
use tokio_util::compat::{TokioAsyncReadCompatExt as _, TokioAsyncWriteCompatExt as _};
use tracing::{info, instrument};
use mumble_web2_common::ProxyOverrides;
#[derive(Debug)]
struct NoCertificateVerification;
impl ServerCertVerifier for NoCertificateVerification {
fn verify_server_cert(
&self,
_end_entity: &CertificateDer<'_>,
_intermediates: &[CertificateDer<'_>],
_server_name: &ServerName<'_>,
_ocsp: &[u8],
_now: UnixTime,
) -> Result<rustls::client::danger::ServerCertVerified, rustls::Error> {
Ok(rustls::client::danger::ServerCertVerified::assertion())
}
fn verify_tls12_signature(
&self,
_message: &[u8],
_cert: &CertificateDer<'_>,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn verify_tls13_signature(
&self,
_message: &[u8],
_cert: &CertificateDer<'_>,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn supported_verify_schemes(&self) -> Vec<rustls::SignatureScheme> {
vec![
rustls::SignatureScheme::RSA_PKCS1_SHA1,
rustls::SignatureScheme::ECDSA_SHA1_Legacy,
rustls::SignatureScheme::RSA_PKCS1_SHA256,
rustls::SignatureScheme::ECDSA_NISTP256_SHA256,
rustls::SignatureScheme::RSA_PKCS1_SHA384,
rustls::SignatureScheme::ECDSA_NISTP384_SHA384,
rustls::SignatureScheme::RSA_PKCS1_SHA512,
rustls::SignatureScheme::ECDSA_NISTP521_SHA512,
rustls::SignatureScheme::RSA_PSS_SHA256,
rustls::SignatureScheme::RSA_PSS_SHA384,
rustls::SignatureScheme::RSA_PSS_SHA512,
rustls::SignatureScheme::ED25519,
rustls::SignatureScheme::ED448,
]
}
}
#[instrument]
pub async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
overrides: &ProxyOverrides,
state: SharedState<impl Reactivity>,
) -> Result<(), Error> {
info!("connecting");
let config = ClientConfig::builder()
.dangerous()
.with_custom_certificate_verifier(Arc::new(NoCertificateVerification))
.with_no_client_auth();
let connector = TlsConnector::from(Arc::new(config));
let addr = format!("{}:{}", address, 64738)
.to_socket_addrs()?
.next()
.unwrap();
let server_tcp = TcpStream::connect(addr).await?;
let server_stream = connector
//.connect("127.0.0.1".try_into()?, server_tcp)
.connect(address.try_into()?, server_tcp)
.await?;
let (read_server, write_server) = tokio::io::split(server_stream);
let read_codec = ClientControlCodec::new();
let write_codec = ClientControlCodec::new();
let reader = asynchronous_codec::FramedRead::new(read_server.compat(), read_codec);
let writer = asynchronous_codec::FramedWrite::new(write_server.compat_write(), write_codec);
let (outgoing_send, outgoing_recv) = futures_channel::mpsc::unbounded();
spawn(crate::sender_loop(outgoing_recv, writer));
crate::network_loop(username, state, event_rx, outgoing_send, reader).await
}
#[allow(unused)]
pub use tokio::spawn;
#[allow(unused)]
pub type SpawnHandle = tokio::runtime::Handle;
-62
View File
@@ -1,62 +0,0 @@
use crate::app::{Command, SharedState};
use crate::Reactivity;
use color_eyre::eyre::Error;
use futures_channel::mpsc::UnboundedReceiver;
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use std::time::Duration;
/// Desktop platform implementation using Tokio and native audio.
pub struct DesktopPlatform;
impl super::PlatformInterface for DesktopPlatform {
type AudioSystem = super::native_audio::NativeAudioSystem;
type ConfigSystem = super::native_config::NativeConfigSystem;
async fn sleep(duration: Duration) {
tokio::time::sleep(duration).await;
}
async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
Ok(ProxyOverrides {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
overrides: &ProxyOverrides,
state: SharedState<impl Reactivity>,
) -> Result<(), Error> {
super::connect::network_connect(address, username, event_rx, overrides, state).await
}
async fn get_status(
_client: &reqwest::Client,
address: &str,
) -> color_eyre::Result<ServerStatus> {
mumble_web2_common::ping_server(address, 64738).await
}
fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
fn request_permissions() {
// No-op on desktop
}
}
-80
View File
@@ -1,80 +0,0 @@
use crate::app::{Command, SharedState};
use crate::Reactivity;
use color_eyre::eyre::Error;
use futures_channel::mpsc::UnboundedReceiver;
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use std::time::Duration;
/// Mobile platform implementation using Tokio, native audio, and Android permissions.
pub struct MobilePlatform;
impl super::PlatformInterface for MobilePlatform {
type AudioSystem = super::native_audio::NativeAudioSystem;
type ConfigSystem = super::native_config::NativeConfigSystem;
async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
Ok(ProxyOverrides {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
overrides: &ProxyOverrides,
state: SharedState<impl Reactivity>,
) -> Result<(), Error> {
super::connect::network_connect(address, username, event_rx, overrides, state).await
}
async fn get_status(
_client: &reqwest::Client,
address: &str,
) -> color_eyre::Result<ServerStatus> {
mumble_web2_common::ping_server(address, 64738).await
}
fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
fn request_permissions() {
request_recording_permission();
}
async fn sleep(duration: Duration) {
tokio::time::sleep(duration).await;
}
}
#[cfg(not(target_os = "android"))]
pub fn request_recording_permission() {}
#[cfg(target_os = "android")]
pub fn request_recording_permission() {
use android_permissions::{PermissionManager, RECORD_AUDIO};
use jni::{objects::JObject, JavaVM};
let ctx = ndk_context::android_context();
let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() };
let activity = unsafe { JObject::from_raw(ctx.context().cast()) };
let manager = PermissionManager::create(vm, activity).unwrap();
if !manager.check(&RECORD_AUDIO).unwrap() {
manager.request(&[&RECORD_AUDIO]).unwrap();
}
}
-193
View File
@@ -1,193 +0,0 @@
//! Platform abstraction layer
//!
//! This module defines traits that each platform (web, desktop, mobile) must implement.
//! The traits make the platform boundary explicit and provide compile-time verification.
#![allow(async_fn_in_trait)]
use crate::app::{Command, SharedState};
use crate::effects::AudioProcessor;
use crate::Reactivity;
use color_eyre::eyre::Error;
use futures_channel::mpsc::UnboundedReceiver;
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use std::collections::HashMap;
use std::future::Future;
use std::time::Duration;
// ============================================================================
// Trait Definitions
// ============================================================================
/// Platform-specific audio subsystem for capturing microphone input and creating playback streams.
///
/// The audio system handles Opus encoding internally - callers receive encoded frames
/// ready for network transmission.
pub trait AudioSystemInterface: Sized {
/// The player type returned by [`create_player`](Self::create_player).
type AudioPlayer: AudioPlayerInterface;
/// Initialize the audio system.
async fn new() -> Result<Self, Error>;
/// Set the processor for the microphone input, mainly noise cancellation settings.
fn set_processor(&self, processor: AudioProcessor);
/// Begin listening to microphone input, calling the `each` function with
/// encoded opus frames.
fn start_recording(
&mut self,
each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error>;
/// Begin playback of an audio stream, returning an object that can be passed opus frames.
fn create_player(&mut self) -> Result<Self::AudioPlayer, Error>;
}
/// A handle to an active audio playback stream for a single remote user.
///
/// Each connected user gets their own `AudioPlayer` instance, which decodes
/// incoming Opus frames and outputs PCM audio to the platform's audio device.
/// The player manages its own decoder state and output buffer.
pub trait AudioPlayerInterface {
/// Decode and play an Opus-encoded audio frame.
fn play_opus(&mut self, payload: &[u8]);
}
pub trait ConfigSystemInterface: Sized + Clone {
fn new() -> Result<Self, Error>;
fn config_get<T>(&self, key: &str) -> Option<T>
where
T: serde::de::DeserializeOwned;
fn config_set<T>(&self, key: &str, value: &T)
where
T: serde::Serialize;
}
/// This is the main trait that each platform must implement. It combines all
/// platform-specific functionality into a single interface, providing compile-time
/// verification that all platforms implement the required functionality.
pub trait PlatformInterface {
type AudioSystem: AudioSystemInterface;
type ConfigSystem: ConfigSystemInterface;
/// Initialize logging for the platform.
fn init_logging();
/// Request runtime permissions (Android audio recording, etc.).
fn request_permissions();
/// Establish a connection to the Mumble server and run the network loop.
fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
proxy_overrides: &ProxyOverrides,
state: SharedState<impl Reactivity>,
) -> impl Future<Output = Result<(), Error>>;
/// Get server status (user count, version, etc.) for the given address.
///
/// On web, this goes through the proxy's /status endpoint and ignores `address`
/// (the proxy is bound to a specific server). On desktop/mobile, this pings the
/// given address directly via UDP.
fn get_status(
client: &reqwest::Client,
address: &str,
) -> impl Future<Output = color_eyre::Result<ServerStatus>>;
/// Load the proxy overrides (proxy URL, cert hash, etc.).
fn load_proxy_overrides() -> impl Future<Output = color_eyre::Result<ProxyOverrides>>;
/// Async sleep for the given duration.
fn sleep(duration: Duration) -> impl Future<Output = ()>;
}
// ============================================================================
// Platform Modules
// ============================================================================
mod stub;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod connect;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod native_audio;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod native_config;
#[cfg(feature = "desktop")]
mod desktop;
#[cfg(feature = "mobile")]
mod mobile;
#[cfg(feature = "web")]
mod web;
// ============================================================================
// Platform Type Alias
// ============================================================================
#[cfg(feature = "web")]
pub type Platform = web::WebPlatform;
#[cfg(all(feature = "desktop", not(feature = "web")))]
pub type Platform = desktop::DesktopPlatform;
#[cfg(all(feature = "mobile", not(feature = "web"), not(feature = "desktop")))]
pub type Platform = mobile::MobilePlatform;
#[cfg(all(
not(feature = "mobile"),
not(feature = "web"),
not(feature = "desktop")
))]
pub type Platform = stub::StubPlatform;
pub type AudioSystem = <Platform as PlatformInterface>::AudioSystem;
pub type AudioPlayer = <AudioSystem as AudioSystemInterface>::AudioPlayer;
pub type ConfigSystem = <Platform as PlatformInterface>::ConfigSystem;
// ========================
// Platform Async Runtime
// ========================
// Note: these can not be part of the Platform because they differ in Send requiremets
#[cfg(all(any(feature = "desktop", feature = "mobile"), not(feature = "web")))]
pub use connect::{spawn, SpawnHandle};
#[cfg(all(
not(feature = "desktop"),
not(feature = "mobile"),
not(feature = "web")
))]
pub use stub::{spawn, SpawnHandle};
#[cfg(feature = "web")]
pub use web::{spawn, SpawnHandle};
// =======================
// Compile-time Assertions
// =======================
const _: () = {
fn assert_platform<T: PlatformInterface>() {}
// Check each implementation, and prevent warnings that the implementations are unused.
#[cfg(feature = "web")]
let _ = assert_platform::<web::WebPlatform>;
#[cfg(feature = "desktop")]
let _ = assert_platform::<desktop::DesktopPlatform>;
#[cfg(feature = "mobile")]
let _ = assert_platform::<mobile::MobilePlatform>;
let _ = assert_platform::<stub::StubPlatform>;
};
fn global_default_config() -> HashMap<String, serde_json::Value> {
serde_json::json!({})
.as_object()
.unwrap()
.clone()
.into_iter()
.collect()
}
-219
View File
@@ -1,219 +0,0 @@
use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
use color_eyre::eyre::{eyre, Error};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
use std::mem::replace;
use std::sync::Arc;
use std::sync::Mutex;
use tracing::{error, info, warn};
pub struct NativeAudioSystem {
output: cpal::Device,
input: cpal::Device,
processors: AudioProcessorSender,
recording_stream: Option<cpal::Stream>,
}
const SAMPLE_RATE: u32 = 48_000;
const PACKET_SAMPLES: u32 = 960;
// Divide by 1000 to get samples per ms, then multiply by 60ms for max Opus frame size.
const MAX_DECODE_SAMPLES: usize = SAMPLE_RATE as usize / 1000 * 60;
fn encode_and_send(
state: TransmitState,
output_buffer: &mut Vec<f32>,
encoder: &mut opus::Encoder,
each: &mut impl FnMut(Vec<u8>, bool),
) {
let (is_terminator, should_encode) = match state {
TransmitState::Silent => return,
TransmitState::Transmitting => (false, output_buffer.len() >= PACKET_SAMPLES as usize),
TransmitState::Terminator => {
output_buffer.resize(PACKET_SAMPLES as usize, 0.0);
(true, true)
}
};
if should_encode {
let remainder = output_buffer.split_off(PACKET_SAMPLES as usize);
let frame = replace(output_buffer, remainder);
match encoder.encode_vec_float(&frame, frame.len() * 2) {
Ok(encoded) => each(encoded, is_terminator),
Err(e) => error!("error encoding {} samples: {e:?}", frame.len()),
}
}
}
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl NativeAudioSystem {
fn choose_config(
&self,
configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>,
) -> Result<cpal::StreamConfig, Error> {
let mut supported_configs: Vec<_> = configs
.filter_map(|cfg| cfg.try_with_sample_rate(cpal::SampleRate(SAMPLE_RATE)))
.filter(|cfg| cfg.sample_format() == cpal::SampleFormat::I16)
.map(|cfg| cpal::StreamConfig {
buffer_size: cpal::BufferSize::Fixed(match *cfg.buffer_size() {
cpal::SupportedBufferSize::Range { min, max } => 480.clamp(min, max),
cpal::SupportedBufferSize::Unknown => 480,
}),
..cfg.config()
})
.collect();
supported_configs.sort_by(|a, b| {
let cpal::BufferSize::Fixed(a_buf) = a.buffer_size else {
unreachable!()
};
let cpal::BufferSize::Fixed(b_buf) = b.buffer_size else {
unreachable!()
};
Ord::cmp(&a.channels, &b.channels).then(Ord::cmp(&a_buf, &b_buf))
});
supported_configs
.get(0)
.cloned()
.ok_or(eyre!("no supported stream configs"))
}
}
impl super::AudioSystemInterface for NativeAudioSystem {
type AudioPlayer = NativeAudioPlayer;
async fn new() -> Result<Self, Error> {
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(NativeAudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn start_recording(
&mut self,
mut each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error> {
let config = self.choose_config(self.input.supported_input_configs()?)?;
info!(
"creating recording on {:?} with {:#?}",
self.input.name()?,
config
);
let mut encoder =
opus::Encoder::new(SAMPLE_RATE, opus::Channels::Mono, opus::Application::Voip)?;
let mut current_processor = AudioProcessor::new(false);
let mut output_buffer = Vec::new();
let processors = self.processors.clone();
let error_callback = move |e: cpal::StreamError| error!("error recording: {e:?}");
let data_callback = move |frame: &[f32], _: &cpal::InputCallbackInfo| {
if let Some(new_processor) = processors.take() {
current_processor = new_processor;
}
let state =
current_processor.process(frame, config.channels as usize, &mut output_buffer);
encode_and_send(state, &mut output_buffer, &mut encoder, &mut each);
};
match self
.input
.build_input_stream(&config, data_callback, error_callback, None)
{
Ok(stream) => {
stream.play()?;
self.recording_stream = Some(stream);
Ok(())
}
Err(err) => {
self.recording_stream = None;
Err(err.into())
}
}
}
fn create_player(&mut self) -> Result<NativeAudioPlayer, Error> {
let config = self.choose_config(self.output.supported_output_configs()?)?;
info!(
"creating player on {:?} with {:#?}",
self.output.name().ok(),
&config
);
let buffer = Arc::new(Mutex::new(dasp_ring_buffer::Bounded::from_raw_parts(
0,
0,
vec![
0;
SAMPLE_RATE as usize/4 // 250ms of buffer
],
)));
let decoder = opus::Decoder::new(SAMPLE_RATE, opus::Channels::Mono)?;
let stream = {
let buffer = buffer.clone();
self.output.build_output_stream(
&config,
move |frame, _info| {
let mut buffer = buffer.lock().unwrap();
for x in frame.chunks_mut(config.channels as usize) {
match buffer.pop() {
Some(y) => {
x.fill(y);
}
None => {
x.fill(0);
}
}
}
},
move |err| error!("could not create output stream {err:?}"),
None,
)?
};
stream.play()?;
Ok(NativeAudioPlayer {
decoder,
stream,
buffer,
tmp: vec![0; MAX_DECODE_SAMPLES],
})
}
}
pub struct NativeAudioPlayer {
decoder: opus::Decoder,
stream: cpal::Stream,
buffer: Buffer,
tmp: Vec<i16>,
}
impl super::AudioPlayerInterface for NativeAudioPlayer {
fn play_opus(&mut self, payload: &[u8]) {
let len = match self.decoder.decode(payload, &mut self.tmp, false) {
Ok(l) => l,
Err(e) => {
error!("opus decode error {e:?}");
return;
}
};
let mut buffer = self.buffer.lock().unwrap();
let mut overrun = 0;
for x in &self.tmp[..len] {
if let Some(_) = buffer.push(*x) {
overrun += 1;
}
}
if overrun > 0 {
warn!("playback overrun by {overrun} samples");
}
}
}
-117
View File
@@ -1,117 +0,0 @@
use color_eyre::eyre::Error;
use std::collections::HashMap;
use tracing::info;
#[derive(Clone, PartialEq)]
pub struct NativeConfigSystem {
config_path: std::path::PathBuf,
}
impl super::ConfigSystemInterface for NativeConfigSystem {
fn new() -> color_eyre::Result<Self, Error> {
return Ok(NativeConfigSystem {
config_path: get_config_path()?,
});
}
fn config_get<T>(&self, key: &str) -> Option<T>
where
T: serde::de::DeserializeOwned,
{
let config = load_config_map(&self.config_path);
let Some(value_untyped) = config.get(key).cloned().or_else(|| config_get_default(key))
else {
return None;
};
match serde_json::from_value::<T>(value_untyped) {
Ok(v) => Some(v),
Err(_) => {
let default_value = config_get_default(key)
.expect("Default value required after config parse failure");
Some(
serde_json::from_value::<T>(default_value)
.expect("Default value could not be parsed"),
)
}
}
}
fn config_set<T>(&self, key: &str, value: &T)
where
T: serde::Serialize,
{
let mut config = load_config_map(&self.config_path);
let json_value = serde_json::to_value(value).expect("failed to serialize config value");
config.insert(key.to_string(), json_value);
save_config_map(&config).expect("failed to set config")
}
}
#[cfg(any(feature = "desktop"))]
fn get_config_path() -> color_eyre::Result<std::path::PathBuf> {
use etcetera::{choose_app_strategy, AppStrategy, AppStrategyArgs};
let strategy = choose_app_strategy(AppStrategyArgs {
top_level_domain: "xyz".to_string(),
author: "ohea".to_string(),
app_name: "Mumble Web2".to_string(),
})
.expect("failed to choose app strategy");
Ok(strategy.config_dir().join("config.json"))
}
#[cfg(target_os = "android")]
fn get_config_path() -> color_eyre::Result<std::path::PathBuf> {
let ctx = ndk_context::android_context();
let vm = unsafe { jni::JavaVM::from_raw(ctx.vm().cast()) }?;
let mut env = vm.attach_current_thread()?;
let ctx = unsafe { jni::objects::JObject::from_raw(ctx.context().cast()) };
let cache_dir = env
.call_method(ctx, "getFilesDir", "()Ljava/io/File;", &[])?
.l()?;
let cache_dir: jni::objects::JString = env
.call_method(&cache_dir, "toString", "()Ljava/lang/String;", &[])?
.l()?
.try_into()?;
let cache_dir = env.get_string(&cache_dir)?;
let cache_dir = cache_dir.to_str()?;
Ok(std::path::PathBuf::from(cache_dir).join("config.json"))
}
fn load_config_map(config_path: &std::path::PathBuf) -> HashMap<String, serde_json::Value> {
match std::fs::read_to_string(config_path) {
Ok(contents) => serde_json::from_str(&contents).unwrap_or_default(),
Err(_) => HashMap::new(),
}
}
fn save_config_map(config: &HashMap<String, serde_json::Value>) -> color_eyre::Result<()> {
let config_path = get_config_path().expect("Could not get config file path.");
if let Some(parent) = config_path.parent() {
info!("Creating config directory: {}", parent.display());
std::fs::create_dir_all(parent)?;
}
let contents = serde_json::to_string_pretty(config)?;
info!("Writing config to {}", config_path.display());
std::fs::write(&config_path, contents)?;
Ok(())
}
fn config_get_default(key: &str) -> Option<serde_json::Value> {
let default_config = platform_default_config();
default_config
.get(key)
.cloned()
.or(super::global_default_config().get(key).cloned())
}
fn platform_default_config() -> HashMap<String, serde_json::Value> {
serde_json::json!({})
.as_object()
.unwrap()
.clone()
.into_iter()
.collect()
}
-129
View File
@@ -1,129 +0,0 @@
/// Stub implementation of the platform interface, so that we can
/// `cargo check` without any --feature flags.
use crate::{app::SharedState, effects::AudioProcessor, Reactivity};
use color_eyre::eyre::Error;
use futures_channel::mpsc::UnboundedReceiver;
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use std::future::Future;
pub struct StubPlatform;
impl super::PlatformInterface for StubPlatform {
type AudioSystem = StubAudioSystem;
type ConfigSystem = StubConfigSystem;
fn init_logging() {
panic!("stubbed platform")
}
fn request_permissions() {
panic!("stubbed platform")
}
fn network_connect(
_address: String,
_username: String,
_event_rx: &mut UnboundedReceiver<crate::app::Command>,
_overrides: &ProxyOverrides,
_state: SharedState<impl Reactivity>,
) -> impl Future<Output = Result<(), Error>> {
async { panic!("stubbed platform") }
}
fn get_status(
_client: &reqwest::Client,
_address: &str,
) -> impl Future<Output = color_eyre::Result<ServerStatus>> {
async { panic!("stubbed platform") }
}
fn load_proxy_overrides() -> impl Future<Output = color_eyre::Result<ProxyOverrides>> {
async { panic!("stubbed platform") }
}
fn sleep(_duration: std::time::Duration) -> impl Future<Output = ()> {
async { panic!("stubbed platform") }
}
}
pub struct StubAudioSystem;
impl super::AudioSystemInterface for StubAudioSystem {
type AudioPlayer = StubAudioPlayer;
async fn new() -> Result<Self, Error> {
panic!("stubbed platform")
}
fn set_processor(&self, _processor: AudioProcessor) {
panic!("stubbed platform")
}
fn start_recording(
&mut self,
_each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error> {
panic!("stubbed platform")
}
fn create_player(&mut self) -> Result<Self::AudioPlayer, Error> {
panic!("stubbed platform")
}
}
pub struct StubAudioPlayer;
impl super::AudioPlayerInterface for StubAudioPlayer {
fn play_opus(&mut self, _payload: &[u8]) {
panic!("stubbed platform")
}
}
#[derive(Clone)]
pub struct StubConfigSystem;
impl super::ConfigSystemInterface for StubConfigSystem {
fn new() -> Result<Self, Error> {
panic!("stubbed platform")
}
fn config_get<T>(&self, key: &str) -> Option<T>
where
T: serde::de::DeserializeOwned,
{
panic!("stubbed platform")
}
fn config_set<T>(&self, key: &str, value: &T)
where
T: serde::Serialize,
{
panic!("stubbed platform")
}
}
#[allow(unused)]
pub struct SpawnHandle;
impl SpawnHandle {
#[allow(unused)]
pub fn spawn<F>(&self, _future: F)
where
F: Future<Output = ()> + 'static,
{
panic!("stubbed platform")
}
#[allow(unused)]
pub fn current() -> Self {
SpawnHandle
}
}
#[allow(unused)]
pub fn spawn<F>(_future: F)
where
F: Future<Output = ()> + 'static,
{
panic!("stubbed platform")
}
-11
View File
@@ -1,11 +0,0 @@
mod app;
mod effects;
mod imp;
mod mainloop;
mod msghtml;
pub use app::*;
pub use imp::*;
pub use mainloop::*;
pub use mime_guess;
pub use reqwest;
-5
View File
@@ -3,10 +3,5 @@ name = "mumble-web2-common"
version = "0.1.0"
edition = "2021"
[features]
networking = ["dep:tokio", "dep:color-eyre"]
[dependencies]
serde = { workspace = true }
tokio = { version = "1", features = ["net", "time"], optional = true }
color-eyre = { version = "0.6", optional = true }
+2 -60
View File
@@ -1,8 +1,9 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct ProxyOverrides {
pub struct ClientConfig {
pub proxy_url: Option<String>,
pub status_url: Option<String>,
pub cert_hash: Option<Vec<u8>>,
pub any_server: bool,
}
@@ -16,62 +17,3 @@ pub struct ServerStatus {
pub max_users: Option<u32>,
pub bandwidth: Option<u32>,
}
/// Mumble UDP ping protocol.
///
/// Send a 12-byte packet: 4 zero bytes + 8-byte identifier.
/// Receive a 24-byte response: 4 bytes version + 8 bytes identifier echo
/// + 4 bytes current_users + 4 bytes max_users + 4 bytes bandwidth.
#[cfg(feature = "networking")]
pub async fn ping_server(address: &str, port: u16) -> color_eyre::Result<ServerStatus> {
use color_eyre::eyre::{bail, eyre};
use std::net::ToSocketAddrs;
use std::time::Duration;
use tokio::net::UdpSocket;
let dest = format!("{}:{}", address, port)
.to_socket_addrs()?
.next()
.ok_or_else(|| eyre!("could not resolve address"))?;
let bind_addr = if dest.is_ipv6() { "[::]:0" } else { "0.0.0.0:0" };
let socket = UdpSocket::bind(bind_addr).await?;
socket.connect(dest).await?;
let request_id: u64 = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_nanos() as u64;
let mut buf = [0u8; 12];
buf[4..12].copy_from_slice(&request_id.to_be_bytes());
socket.send(&buf).await?;
let mut response = [0u8; 24];
let timeout = tokio::time::timeout(Duration::from_secs(2), socket.recv(&mut response)).await;
match timeout {
Ok(Ok(len)) if len >= 24 => {
let version_major = response[0] as u32;
let version_minor = response[1] as u32;
let version_patch = response[2] as u32;
let users =
u32::from_be_bytes([response[12], response[13], response[14], response[15]]);
let max_users =
u32::from_be_bytes([response[16], response[17], response[18], response[19]]);
let bandwidth =
u32::from_be_bytes([response[20], response[21], response[22], response[23]]);
Ok(ServerStatus {
success: true,
version: Some((version_major, version_minor, version_patch)),
users: Some(users),
max_users: Some(max_users),
bandwidth: Some(bandwidth),
})
}
Ok(Ok(_)) => bail!("ping response too short"),
Ok(Err(e)) => Err(e.into()),
Err(_) => bail!("ping timed out"),
}
}
+1 -1
View File
@@ -1,4 +1,4 @@
proxy_url = "https://127.0.0.1:4433/proxy"
public_url = "https://127.0.0.1:4433"
https_listen_address = "127.0.0.1:4433"
http_listen_address = "127.0.0.1:8080"
mumble_server_url = "[SERVER_URL_HERE]"
+9 -7
View File
@@ -1,12 +1,14 @@
localhost:64444 {
tls internal
tls internal
# Proxy /config path to mumble-web2-proxy
reverse_proxy /overrides http://127.0.0.1:4400
# Proxy /config path to mumble-web2-proxy
reverse_proxy /config http://127.0.0.1:4400
# Proxy /status path to mumble-web2-proxy
reverse_proxy /status http://127.0.0.1:4400
# Proxy /status path to mumble-web2-proxy
reverse_proxy /status http://127.0.0.1:4400
# Proxy root path to dx-serve
reverse_proxy http://127.0.0.1:8080
# Proxy root path to dx-serve
reverse_proxy http://127.0.0.1:8080
}
-43
View File
@@ -1,43 +0,0 @@
FROM rust:trixie
ARG ANDROID_CLI_TOOLS_VERSION=13114758
# Install android rust toolchains
RUN rustup target add aarch64-linux-android armv7-linux-androideabi i686-linux-android x86_64-linux-android
# Install debian dependencies
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
nodejs \
ca-certificates \
curl \
unzip \
default-jdk
# Install android commandline tools (required to install the sdk)
RUN cd /tmp && \
curl -o commandlinetools-linux.zip "https://dl.google.com/android/repository/commandlinetools-linux-${ANDROID_CLI_TOOLS_VERSION}_latest.zip" && \
unzip commandlinetools-linux.zip && \
mkdir -p /opt/android-tools/cmdline-tools && \
cp -r cmdline-tools /opt/android-tools/cmdline-tools/latest
# Install required android tools
RUN yes | /opt/android-tools/cmdline-tools/latest/bin/sdkmanager --install "platform-tools" "platforms;android-36.1" "build-tools;36.1.0" "ndk;29.0.14206865" "cmake;3.31.6"
# Install cargo binstall
RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
# Install dioxus-cli
RUN cargo binstall dioxus-cli@0.7.3
# Install bindgen-cli
RUN cargo binstall bindgen-cli
# Set required env vars
ENV ANDROID_HOME="/opt/android-tools/"
ENV NDK_HOME="$ANDROID_HOME/ndk/29.0.14206865"
ENV PATH="$PATH:$ANDROID_HOME/platform-tools"
ENV PATH="$PATH:/opt/android-tools/cmake/3.31.6/bin/"
ENV LLVM_CONFIG_PATH="/opt/android-tools/ndk/29.0.14206865/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-config"
-21
View File
@@ -1,21 +0,0 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$(dirname "$(realpath "${BASH_SOURCE[0]}")")"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
IMAGE_NAME="mumble-web2/android-release-builder:local"
TARGET="${1:-aarch64-linux-android}"
echo "==> Building Android builder Docker image..."
docker build -t "$IMAGE_NAME" -f "$SCRIPT_DIR/android-release-builder.Dockerfile" "$PROJECT_ROOT"
echo "==> Building Android APK (target: $TARGET)..."
docker run --rm \
-v "$PROJECT_ROOT:/app" \
-w /app \
"$IMAGE_NAME" \
dx build --platform android --target "$TARGET" --release -p mumble-web2-gui
echo "==> Done! APK should be at:"
echo " target/dx/mumble-web2-gui/release/android/app/app/build/outputs/apk/debug/app-debug.apk"
+4 -4
View File
@@ -5,7 +5,7 @@ services:
- "64444:64444/tcp"
- "64444:64444/udp"
volumes:
- ./Caddyfile:/etc/caddy/Caddyfile:z
- ./Caddyfile:/etc/caddy/Caddyfile
#- caddy_data:/data
#- caddy_config:/config
depends_on:
@@ -20,7 +20,7 @@ services:
# volumes:
# - ..:/app
# environment:
# - MUMBLE_WEB2_PROXY_OVERRIDES_URL=https://localhost:64444/overrides
# - MUMBLE_WEB2_GUI_CONFIG_URL=https://localhost:64444/config
# stdin_open: true
# tty: true
# command: >
@@ -35,8 +35,8 @@ services:
image: rust:latest
working_dir: /app
volumes:
- ..:/app:z
- ./proxy-config.toml:/app/config.toml:z
- ..:/app
- ./proxy-config.toml:/app/config.toml
ports:
- "4433:4433/tcp"
- "4433:4433/udp"
+1
View File
@@ -1,3 +1,4 @@
public_url = "https://localhost:64444"
proxy_url = "https://127.0.0.1:4433/proxy"
https_listen_address = "127.0.0.1:4433"
http_listen_address = "127.0.0.1:4400"
+3 -4
View File
@@ -44,12 +44,11 @@ RUN choco install rustup.install -y --no-progress
RUN rustup toolchain install stable-x86_64-pc-windows-msvc
RUN rustup default stable-x86_64-pc-windows-msvc
# Install cargo binstall
RUN Set-ExecutionPolicy Unrestricted -Scope Process; `
iex (Invoke-WebRequest "https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.ps1" -UseBasicParsing).Content
iex (iwr "https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.ps1").Content
SHELL ["C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat", "&&", "powershell.exe", "-NoLogo", "-ExecutionPolicy", "Bypass"]
# Install dioxus-cli
RUN cargo binstall dioxus-cli@0.7.3
RUN cargo binstall dioxus-cli --version 0.7.2
ENTRYPOINT ["C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat", "&&", "powershell.exe", "-NoLogo", "-ExecutionPolicy", "Bypass"]
+115 -16
View File
@@ -4,31 +4,130 @@ version = "0.1.0"
edition = "2021"
[dependencies]
dioxus = { version = "0.7.2" }
# Web Dependencies
# ================
dioxus-web = { version = "0.7.2", optional = true }
mumble-web2-client = { version = "0.1.0", path = "../client" }
mumble-web2-common = { version = "0.1.0", path = "../common" }
color-eyre = "^0.6.3"
wasm-bindgen = { version = "^0.2.92", optional = true }
wasm-bindgen-futures = { version = "^0.4.42", optional = true }
wasm-streams = { version = "^0.4.0", optional = true }
serde-wasm-bindgen = { version = "^0.6.5", optional = true }
js-sys = { version = "^0.3.70", optional = true }
web-sys = { version = "^0.3.72", features = [
"WebTransport",
"console",
"WebTransportOptions",
"WebTransportBidirectionalStream",
"WebTransportSendStream",
"WebTransportReceiveStream",
"Navigator",
"MediaDevices",
"AudioDecoder",
"AudioDecoderInit",
"AudioData",
"AudioEncoderConfig",
"AudioDecoderConfig",
"EncodedAudioChunk",
"EncodedAudioChunkInit",
"EncodedAudioChunkType",
"CodecState",
"MediaStreamTrackGenerator",
"MediaStreamTrackGeneratorInit",
"AudioContext",
"AudioContextOptions",
"MediaStream",
"GainNode",
"MediaStreamAudioSourceNode",
"BaseAudioContext",
"AudioDestinationNode",
"AudioWorkletNode",
"AudioWorklet",
"AudioWorkletProcessor",
"MediaStreamConstraints",
"WorkletOptions",
"AudioEncoder",
"AudioEncoderInit",
"AudioDataInit",
"HtmlAnchorElement",
"Url",
"Blob",
"AudioDataCopyToOptions",
"AudioSampleFormat",
"Storage",
], optional = true }
gloo-timers = { version = "^0.3.0", features = ["futures"], optional = true }
tracing-web = { version = "^0.1.3", optional = true }
# Platform Integration
# ====================
# rfd only supports windows, macos, linux, and wasm32. No support for Android or iOS
[target.'cfg(any(target_os = "linux", target_os = "windows", target_os = "macos", target_arch = "wasm32"))'.dependencies]
rfd = { git = "https://github.com/PolyMeilex/rfd.git", version = "^0.16.0", default-features = false, optional = true }
# Desktop Dependecies
# ===================
tokio = { version = "^1.41.1", features = ["net", "rt"], optional = true }
tokio-rustls = { version = "^0.26.0", optional = true }
opus = { version = "0.3.0", optional = true }
cpal = { version = "0.15.3", optional = true }
dasp_ring_buffer = { version = "0.11.0", optional = true }
# Base Dependencies
# ================
dioxus = { version = "0.7.2" }
once_cell = "1.19.0"
asynchronous-codec = { workspace = true }
futures = "^0.3.30"
merge-io = "^0.3.0"
mumble-protocol = { workspace = true }
serde_json = "1"
tokio-util = { version = "^0.7.11", features = ["codec", "compat"] }
byteorder = "1.5"
ogg = "^0.9.1"
ordermap = "^0.5.3"
html-purifier = "^0.3.0"
markdown = "^0.3.0"
futures-channel = "^0.3.30"
mumble-web2-common = { workspace = true }
serde = { workspace = true }
tracing-subscriber = { version = "^0.3.18", features = ["ansi"] }
tracing = "^0.1.40"
color-eyre = "^0.6.3"
crossbeam-queue = "^0.3.11"
lol_html = "^2.2.0"
rfd = { git = "https://github.com/samsartor/rfd.git", version = "^0.16.0", default-features = false }
base64 = "^0.22"
mime_guess = "^2.0.5"
async_cell = "^0.2.3"
reqwest = { version = "^0.12.22", features = ["json"] }
dioxus-asset-resolver = "0.7.2"
# Denoising
# =========
deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = ["tract"] }
crossbeam = "0.8.4"
[patch.crates-io]
tract-hir = "=0.12.4"
tract-core = "=0.12.4"
tract-onnx = "=0.12.4"
tract-pulse = "=0.12.4"
[features]
web = [
"dioxus/web",
"dioxus-web",
"mumble-web2-client/web",
"rfd",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"serde-wasm-bindgen",
"js-sys",
"web-sys",
"gloo-timers",
"tracing-web",
"deep_filter/wasm",
]
desktop = [
"dioxus/desktop",
"mumble-web2-client/desktop",
"tokio",
"tokio-rustls",
"tracing-subscriber/env-filter",
"opus",
"cpal",
"dasp_ring_buffer",
"rfd/xdg-portal",
]
mobile = [
"dioxus/mobile",
"mumble-web2-client/mobile"
"rfd/tokio",
]
+2 -4
View File
@@ -8,8 +8,6 @@ out_dir = "dist"
# resource (public) file folder
asset_dir = "public"
android_manifest = "build/AndroidManifest.xml"
[web.app]
# HTML title tag content
title = "Mumble Web 2"
@@ -25,7 +23,7 @@ watch_path = ["src", "assets"]
# CSS style file
style = []
# Javascript code file
script = ["loader.js"]
script = []
[web.resource.dev]
# serve: [dev-server] only
@@ -35,7 +33,7 @@ style = []
script = []
[bundle]
identifier = "xyz.ohea.mumble_web_2"
identifier = "xyz.ohea.mumble-web-2"
publisher = "OheaCorp"
icon = [
"icons/32x32.png",
+11 -140
View File
@@ -16,7 +16,6 @@ body {
}
#main {
visibility: visible;
height: 100vh;
display: flex;
flex-direction: column;
@@ -84,44 +83,6 @@ a:visited {
}
}
.channel_header {
display: flex;
flex-direction: row;
align-items: center;
}
.channel_arrow {
width: 1em;
text-align: center;
margin-right: 0.25rem;
}
.channel_arrow--placeholder {
pointer-events: none;
visibility: hidden;
}
/* The whole right side of the row is the dblclick target */
.channel_row_click {
flex: 1;
padding: 0.1rem 0.25rem 0.1rem 0.5rem;
cursor: pointer;
}
/* Hover highlight for whole row area (title + blank space) */
.channel_row_click:hover {
background-color: var(--channel-hover-bg, #222); /* pick your color */
}
/* still keep text non-selectable if desired */
.channel_details {
-webkit-user-select: none;
-ms-user-select: none;
user-select: none;
}
.channel {
&_details {
flex: 0 0 100%;
@@ -207,68 +168,26 @@ a:visited {
background-color: oklch(0.53 0.1431 264.18);
border-radius: 50%;
aspect-ratio: 1 / 1;
flex-shrink: 0;
.material-symbols-outlined {
font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48;
}
}
.button_row {
display: flex;
gap: clamp(4px, 1vw, 10px);
align-items: center;
flex-wrap: nowrap;
min-height: 0;
gap: 10px;
.spacer {
flex-grow: 1;
flex-shrink: 1;
min-width: 0;
}
.connection_status {
display: flex;
flex-direction: column;
min-width: 0;
flex-shrink: 1;
.material-symbols-outlined {
font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48;
vertical-align: middle;
}
}
.user_info {
display: flex;
flex-direction: column;
min-width: 0;
overflow: hidden;
flex-shrink: 1;
.user_name {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.user_data {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
}
}
.toggle_button {
padding: clamp(4px, 0.5vw, 8px);
padding: 8px;
height: 100%;
aspect-ratio: 1 / 1;
flex-shrink: 0;
background-color: unset;
border: solid rgb(255 255 255 / 0.1) clamp(1px, 0.3vw, 3px);
border-radius: clamp(4px, 0.8vw, 10px);
border: solid rgb(255 255 255 / 0.1) 3px;
border-radius: 10px;
color: rgb(255 255 255 / 50%);
transition: all 0.5s ease-in-out;
@@ -281,6 +200,7 @@ a:visited {
.material-symbols-outlined {
font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48;
vertical-align: middle;
font-size: 35px;
}
}
@@ -325,60 +245,16 @@ a:visited {
}
&_control_box {
padding: clamp(6px, 0.8vw, 12px);
margin: clamp(6px, 0.8vw, 12px);
padding: 16px;
margin: 16px;
background-color: var(--light-bg-color);
border-radius: clamp(6px, 1vw, 10px);
border-radius: 10px;
overflow: hidden;
grid-area: control;
display: flex;
gap: clamp(4px, 0.8vw, 8px);
gap: 10px;
flex-direction: column;
// Dynamic font sizing for control elements
--control-icon-size: clamp(16px, 2.5vw, 30px);
--control-text-size: clamp(12px, 2vw, 25px);
--control-small-text-size: clamp(10px, 1.5vw, 20px);
--user-icon-size: clamp(24px, 4vw, 45px);
--toggle-icon-size: clamp(18px, 3vw, 35px);
.connection_status {
.material-symbols-outlined {
font-size: var(--control-icon-size);
}
.status_text {
font-size: var(--control-text-size);
}
.channel_text {
font-size: var(--control-small-text-size);
}
}
.user_edit_button {
.material-symbols-outlined {
font-size: var(--user-icon-size);
}
}
.user_info {
.user_name {
font-size: var(--control-text-size);
}
.user_data {
font-size: var(--control-small-text-size);
}
}
.toggle_button {
.material-symbols-outlined {
font-size: var(--toggle-icon-size);
}
}
hr {
margin: 0;
}
}
}
@@ -403,11 +279,6 @@ a:visited {
color: #b3c6b4;
}
&_version {
color: var(--txt-color);
font-weight: normal;
}
&_bttn {
font-weight: bold;
font-size: large;
@@ -431,4 +302,4 @@ a:visited {
color: red;
}
}
}
}
@@ -1,7 +1,7 @@
const SAMPLE_RATE = 48000;
const PACKET_SAMPLES = 960;
class RustMicWorklet extends AudioWorkletProcessor {
class RustWorklet extends AudioWorkletProcessor {
constructor(options) {
super();
this.module = options.processorOptions;
@@ -31,7 +31,7 @@ class RustMicWorklet extends AudioWorkletProcessor {
}
this.buffer_offset -= PACKET_SAMPLES;
this.timestamp = null;
}
}
process(inputs) {
//console.log(inputs);
@@ -60,44 +60,4 @@ class RustMicWorklet extends AudioWorkletProcessor {
}
};
class RustSpeakerWorklet extends AudioWorkletProcessor {
constructor() {
super();
this.queue = [];
this.readIndex = 0;
this.port.onmessage = (event) => {
this.queue.push(event.data)
};
}
process(inputs, outputs) {
if (this.queue.length) {
console.log(this.queue[0].samples.length, outputs[0][0].length);
}
const output = outputs[0];
for (let i = 0; i < output[0].length; i++) {
if (!this.queue.length) {
return true;
}
const current = this.queue[0];
for (let ch = 0; ch < output.length; ch++) {
output[ch][i] = current.samples[this.readIndex];
}
this.readIndex++;
if (this.readIndex >= current.samples.length) {
this.queue.shift();
this.readIndex = 0;
}
}
return true;
}
};
registerProcessor("rust_mic_worklet", RustMicWorklet);
registerProcessor("rust_speaker_worklet", RustSpeakerWorklet);
registerProcessor("rust_mic_worklet", RustWorklet);
+38
View File
@@ -0,0 +1,38 @@
use std::path::Path;
use std::process::Command;
fn main() {
// Define the target directory and file
let assets_dir = "assets";
let target_file = format!("{}/DeepFilterNet3_ll_onnx.tar.gz", assets_dir);
let target_path = Path::new(&target_file);
// Check if the file already exists
if target_path.exists() {
println!("cargo:warning=DeepFilterNet model already exists at {}", target_file);
return;
}
println!("cargo:warning=Downloading DeepFilterNet model to {}...", target_file);
// Download the file using curl
let url = "https://github.com/Rikorose/DeepFilterNet/raw/refs/heads/main/models/DeepFilterNet3_ll_onnx.tar.gz";
let status = Command::new("curl")
.args([
"-L", // Follow redirects
"-o", &target_file, // Output file
url,
])
.status()
.expect("Failed to execute curl command. Make sure curl is installed.");
if !status.success() {
panic!("Failed to download DeepFilterNet model from {}", url);
}
println!("cargo:warning=Successfully downloaded DeepFilterNet model to {}", target_file);
// Rerun this build script if the target file is deleted
println!("cargo:rerun-if-changed={}", target_file);
}
-32
View File
@@ -1,32 +0,0 @@
<?xml version="1.0" encoding="utf-8" ?>
<!--
Borrowed from https://github.com/irh/audio-app/blob/main/apps/dioxus/AndroidManifest.xml
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-feature android:name="android.hardware.audio.low_latency" android:required="false" />
<uses-feature android:name="android.hardware.audio.output" android:required="false" />
<uses-feature android:name="android.hardware.audio.pro" android:required="false" />
<uses-feature android:name="android.hardware.microphone" android:required="false" />
<application android:hasCode="true" android:supportsRtl="true" android:icon="@mipmap/ic_launcher"
android:extractNativeLibs="true"
android:allowNativeHeapPointerTagging="false"
android:label="@string/app_name"
android:theme="@style/AppTheme"
android:networkSecurityConfig="@xml/network_security_config">
<activity android:configChanges="orientation|screenLayout|screenSize|keyboardHidden" android:exported="true"
android:label="@string/app_name" android:name="dev.dioxus.main.MainActivity">
<meta-data android:name="android.app.lib_name" android:value="dioxusmain" />
<meta-data android:name="android.app.func_name" android:value="ANativeActivity_onCreate" />
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
-68
View File
@@ -1,68 +0,0 @@
// Loading screen that displays while WASM loads
(function() {
// Create and inject loader styles immediately (head exists)
var style = document.createElement('style');
style.textContent =
'.wasm-loader {' +
'position: fixed;' +
'top: 0;' +
'left: 0;' +
'width: 100%;' +
'height: 100%;' +
'background-color: oklch(0.15 0.01 338.64);' +
'display: flex;' +
'align-items: center;' +
'justify-content: center;' +
'z-index: 9999;' +
'transition: opacity 0.3s ease-out;' +
'}' +
'.wasm-loader.hidden {' +
'opacity: 0;' +
'pointer-events: none;' +
'}' +
'.wasm-spinner {' +
'width: 48px;' +
'height: 48px;' +
'border: 4px solid rgba(123, 173, 159, 0.2);' +
'border-top-color: #7bad9f;' +
'border-radius: 50%;' +
'animation: wasm-spin 1s linear infinite;' +
'}' +
'@keyframes wasm-spin {' +
'to { transform: rotate(360deg); }' +
'}' +
'#main {' +
'background-color: oklch(0.15 0.01 338.64);' +
'}';
document.head.appendChild(style);
function init() {
// Create loader element
var loader = document.createElement('div');
loader.className = 'wasm-loader';
loader.innerHTML = '<div class="wasm-spinner"></div>';
document.body.appendChild(loader);
// Watch for Dioxus to mount content in #main
var observer = new MutationObserver(function(mutations, obs) {
var main = document.getElementById('main');
if (main && main.children.length > 0) {
loader.classList.add('hidden');
setTimeout(function() { loader.remove(); }, 300);
obs.disconnect();
}
});
observer.observe(document.body, {
childList: true,
subtree: true
});
}
// Wait for body to exist
if (document.body) {
init();
} else {
document.addEventListener('DOMContentLoaded', init);
}
})();
+752
View File
@@ -0,0 +1,752 @@
#![allow(non_snake_case)]
use dioxus::prelude::*;
use mime_guess::Mime;
use mumble_web2_common::{ClientConfig, ServerStatus};
use ordermap::OrderSet;
use std::collections::HashMap;
use crate::imp;
pub type ChannelId = u32;
pub type UserId = u32;
pub enum ConnectionState {
Disconnected,
Connecting,
Connected,
Failed(String),
}
#[derive(Debug)]
pub enum Command {
Connect {
address: String,
username: String,
config: ClientConfig,
},
SendChat {
markdown: String,
channels: Vec<ChannelId>,
},
SendFile {
bytes: Vec<u8>,
name: String,
mime: Option<Mime>,
channels: Vec<ChannelId>,
},
SetMute {
mute: bool,
},
SetDeaf {
deaf: bool,
},
EnterChannel {
channel: ChannelId,
user: UserId,
},
UpdateMicEffects {
denoise: bool,
},
Disconnect,
}
use Command::*;
use ConnectionState::*;
#[derive(Default)]
pub struct ChannelState {
pub name: String,
pub children: OrderSet<ChannelId>,
pub users: OrderSet<UserId>,
pub parent: Option<ChannelId>,
}
#[derive(Default)]
pub struct UserState {
pub name: String,
pub channel: ChannelId,
pub deaf: bool,
pub mute: bool,
pub self_deaf: bool,
pub self_mute: bool,
}
impl UserState {
pub fn icon(&self) -> UserIcon {
match (self.mute || self.self_mute, self.deaf || self.self_deaf) {
(false, false) => UserIcon::Normal,
(true, false) => UserIcon::Muted,
(_, true) => UserIcon::Deafened,
}
}
}
pub struct Chat {
pub raw: String,
pub dangerous_html: String,
pub sender: Option<UserId>,
}
#[derive(Default)]
pub struct ServerState {
pub channels: HashMap<ChannelId, ChannelState>,
pub users: HashMap<UserId, UserState>,
pub chat: Vec<Chat>,
pub session: Option<UserId>,
}
impl ServerState {
pub fn this_user(&self) -> Option<&UserState> {
self.users.get(&self.session?)
}
}
pub struct State {
pub status: GlobalSignal<ConnectionState>,
pub server: GlobalSignal<ServerState>,
}
pub static STATE: State = State {
status: Signal::global(|| Disconnected),
server: Signal::global(|| Default::default()),
};
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum UserIcon {
Normal,
Muted,
Deafened,
None,
}
impl UserIcon {
pub fn url(self) -> Option<Asset> {
// speaker from https://www.svgrepo.com/collection/ikono-bold-line-icons/
// mic from https://www.svgrepo.com/collection/hashicorp-line-interface-icons/
use UserIcon::*;
Some(match self {
Normal => asset!("assets/mic-svgrepo-com.svg"),
Muted => asset!("assets/mic-off-svgrepo-com.svg"),
Deafened => asset!("assets/speaker-muted-svgrepo-com.svg"),
None => return Option::None,
})
}
}
#[component]
pub fn UserPill(name: String, icon: UserIcon, isself: bool) -> Element {
let color = match icon {
UserIcon::Normal => "var(--accent-normal)",
UserIcon::Muted => "var(--accent-muted)",
UserIcon::Deafened => "var(--accent-deafened)",
UserIcon::None => "var(--accent-normal)",
};
rsx!(
div {
class: match isself { true => "userpil is_self", false => "userpil" },
style: "background-color: {color}",
{ icon.url().map(|url| rsx!(img { src: url })) }
"\u{00A0}{name}\u{00A0}"
}
)
}
#[component]
pub fn User(id: UserId) -> Element {
let server = STATE.server.read();
match server.users.get(&id) {
Some(state) => rsx!(UserPill {
name: state.name.clone(),
icon: state.icon(),
isself: server.session.unwrap() == id,
}),
None => rsx!(UserPill {
name: format!("unknown user ({id})"),
icon: UserIcon::None,
isself: false,
}),
}
}
#[component]
pub fn Channel(id: ChannelId) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let server = STATE.server.read();
let user = server.session.unwrap();
let Some(state) = server.channels.get(&id) else {
return rsx!("missing channel {id}");
};
rsx!(
details {
class: "channel_details",
open: true,
summary {
span {
role: "button",
ondoubleclick: move |evt| {
evt.stop_propagation();
evt.prevent_default();
net.send(EnterChannel { channel: id, user })
},
"{state.name}"
}
}
if state.users.len() + state.children.len() > 0 {
div {
class: "channel_children",
for id in state.users.iter() {
User { id: *id }
}
for child in state.children.iter() {
Channel { id: *child }
}
}
}
}
)
}
pub fn pick_and_send_file(net: &Coroutine<Command>) {
let channels = if let Some(user) = STATE.server.read().this_user() {
vec![user.channel]
} else {
return;
};
let dialog = rfd::AsyncFileDialog::new().pick_file();
let sender = net.tx();
spawn(async move {
let Some(handle) = dialog.await else { return };
let name = handle.file_name();
let bytes = handle.read().await;
let mime = mime_guess::from_path(&name).first();
let _ = sender.unbounded_send(SendFile {
bytes,
name,
mime,
channels,
});
});
}
#[component]
pub fn ChatView() -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let server = STATE.server.read();
let mut draft = use_signal(|| "".to_string());
let mut do_send = move || {
if let Some(user) = STATE.server.read().this_user() {
net.send(SendChat {
markdown: draft.write().split_off(0),
channels: vec![user.channel],
});
}
};
rsx!(
div {
class: "chat_panel",
div {
class: "chat_history",
for chat in server.chat.iter() {
div {
class: "chat_message",
if let Some(sender) = chat.sender.and_then(|u| server.users.get(&u)) {
UserPill {
name: sender.name.clone(),
icon: UserIcon::None,
isself: false,
}
}
span {
dangerous_inner_html: "{chat.dangerous_html}",
}
}
}
}
div {
class: "chat_box_wrapper",
div {
class: "chat_box",
input {
placeholder: "say something",
value: "{draft.read()}",
oninput: move |evt| draft.set(evt.value().clone()),
onkeypress: move |evt: Event<KeyboardData>| {
if evt.code() == Code::Enter && evt.modifiers().is_empty() {
do_send();
}
}
}
div {
span {
onclick: move |_| pick_and_send_file(&net),
class: "material-symbols-outlined",
style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
"attach_file",
}
}
div {
span {
onclick: move |_| do_send(),
class: "material-symbols-outlined",
style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
"send",
}
}
}
//button {
// onclick: move |_| do_send(),
// "Send"
//}
}
}
)
}
#[component]
pub fn ControlView(config: Resource<ClientConfig>) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let status = &STATE.status;
let server = STATE.server.read();
let Some(&UserState {
deaf,
self_deaf,
mute,
self_mute,
ref name,
channel,
..
}) = server.this_user()
else {
return rsx!();
};
let current_channel_name = server.channels[&channel].name.clone();
let proxy_url = config
.read_unchecked()
.as_ref()
.and_then(|gui_config| gui_config.proxy_url.clone());
let connecting_color = "yellow";
let connected_color = "oklch(0.55 0.1184 141.35)";
let disconnected_color = "gray";
let failed_color = "red";
let connection_status = match &*status.read() {
Connecting => rsx! {
div {
style: "color: \"{connecting_color}\";",
span {
class: "material-symbols-outlined",
style: "vertical-align: middle; font-size: 30px;",
"signal_cellular_alt_2_bar"
}
span {
style: "width: 5px; display: inline-block;"
}
span {
style: "vertical-align: middle; font-size: 30px;",
"Connecting"
}
}
},
Connected => rsx! {
div {
div {
style: "color: \"{connected_color}\";",
span {
class: "material-symbols-outlined",
style: "vertical-align: middle; font-size: 30px;",
"signal_cellular_alt"
}
span {
style: "width: 5px; display: inline-block;"
}
span {
style: "vertical-align: middle; font-size: 25px;",
"Connected"
}
}
div {
span { style: "width: 3px; display: inline-block;"}
span { "{current_channel_name}" }
if let Some(proxy_url) = proxy_url {
span { "" }
span { "{proxy_url}" }
}
}
}
},
Disconnected => rsx! {
div {
style: "color: \"{disconnected_color}\";",
span {
class: "material-symbols-outlined",
style: "vertical-align: middle;",
"signal_disconnected"
}
span {
style: "width: 5px; display: inline-block;"
}
span {
style: "vertical-align: middle;",
"Disconnected"
}
}
},
Failed(_) => rsx! {
div {
style: "color: \"{failed_color}\";",
span {
class: "material-symbols-outlined",
style: "vertical-align: middle;",
"error"
}
span {
style: "width: 5px; display: inline-block;"
}
span {
style: "vertical-align: middle;",
"Failed"
}
}
},
};
let denoise = use_signal(|| false);
rsx!(
// Server control
div {
class: "button_row",
div {
{connection_status}
}
span { class: "spacer" }
button {
class: "toggle_button",
onclick: move |_| net.send(Disconnect),
span {
class: "material-symbols-outlined",
"signal_disconnected"
}
}
}
hr { style: "width: 100%;" }
// User control
div {
class: "button_row",
button {
class: "user_edit_button",
span {
class: "material-symbols-outlined",
style: "color: oklch(0.65 0.2245 28.06); font-size: 45px; font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48;",
"person_edit"
}
}
div {
div {
span { style: "font-size: 25px;", "{name}" }
}
div {
span { style: "font-size: 20px; color: gray;", "some data" }
}
}
span { class: "spacer" }
button {
class: match denoise() {
true => "toggle_button is_on",
false => "toggle_button",
},
role: "switch",
aria_checked: denoise(),
onclick: move |_| {
let new_denoise = !denoise();
*denoise.write_unchecked() = new_denoise;
net.send(UpdateMicEffects { denoise: new_denoise })
},
match denoise() {
true => rsx!(span { class: "material-symbols-outlined", "cadence"}),
false => rsx!(span { class: "material-symbols-outlined", "graphic_eq"}),
}
}
button {
class: match mute || self_mute {
true => "toggle_button is_on",
false => "toggle_button",
},
role: "switch",
aria_checked: mute || self_mute,
disabled: mute,
onclick: move |_| net.send(SetMute { mute: !self_mute }),
match mute || self_mute {
true => rsx!(span { class: "material-symbols-outlined", "mic_off"}),
false => rsx!(span { class: "material-symbols-outlined", "mic"}),
}
}
button {
class: match deaf || self_deaf {
true => "toggle_button in_on",
false => "toggle_button",
},
role: "switch",
aria_checked: deaf || self_deaf,
disabled: deaf,
onclick: move |_| net.send(SetDeaf { deaf: !self_deaf }),
match deaf || self_deaf {
true => rsx!(span { class: "material-symbols-outlined", "volume_off"}),
false => rsx!(span { class: "material-symbols-outlined", "volume_up"}),
}
}
}
)
}
#[component]
pub fn ServerView(config: Resource<ClientConfig>) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let server = STATE.server.read();
let Some(&UserState {
deaf,
self_deaf,
mute,
self_mute,
..
}) = server.this_user()
else {
return rsx!();
};
rsx!(
div {
class: "server_grid",
div {
class: "server_channel_box",
for (id, state) in server.channels.iter() {
if state.parent.is_none() {
Channel { id: *id }
}
}
}
div {
class: "server_chat_box",
ChatView {}
}
div {
class: "server_control_box",
ControlView { config }
}
}
)
}
async fn get_status(
client: &reqwest::Client,
status_url: &str,
) -> color_eyre::Result<ServerStatus> {
Ok(client
.get(status_url)
.send()
.await?
.json::<ServerStatus>()
.await?)
}
#[component]
pub fn LoginView(config: Resource<ClientConfig>) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let last_status = use_signal(|| None::<color_eyre::Result<ServerStatus>>);
use_resource(move || async move {
let Some(config) = config.read().clone() else {
return;
};
let Some(status_url) = config.status_url else {
return;
};
let client = reqwest::Client::new();
loop {
*last_status.write_unchecked() = Some(get_status(&client, &status_url).await);
imp::sleep(std::time::Duration::from_secs_f32(1.0)).await;
}
});
let mut address_input = use_signal(|| None::<String>);
let address = use_memo(move || {
if let Some(addr) = address_input() {
addr.clone()
} else {
config()
.and_then(|c| c.proxy_url.clone())
.unwrap_or_default()
}
});
let previous_username = imp::load_username();
let mut username = use_signal(|| previous_username.unwrap_or(String::new()));
let do_connect = move |_| {
//let _ = set_default_username(&username.read());
let _ = imp::set_default_username(&username.read());
net.send(Connect {
address: address.read().clone(),
username: username.read().clone(),
config: config.read().clone().unwrap_or_default(),
})
};
let status = &STATE.status;
let bottom = match &*status.read() {
Disconnected => rsx! {
button {
class: "login_bttn",
onclick: do_connect.clone(),
"Connect"
}
},
Connecting => rsx! {
div {
class: "login_bttn",
"Connecting..."
}
},
Failed(msg) => rsx!(
button {
class: "login_bttn",
onclick: do_connect.clone(),
"Reconnect"
}
div {
class: "login_error",
"Failed to connect:"
pre {
"{msg}"
}
}
),
Connected => unreachable!(),
};
rsx!(
div {
class: "login",
h1 {
"Mumble Web"
}
if config.read().as_ref().is_some_and(|cfg| cfg.any_server) {
div {
label {
for: "address-entry",
"Server Address:"
}
input {
id: "address-entry",
placeholder: "address",
value: "{address.read()}",
autofocus: "true",
oninput: move |evt| address_input.set(Some(evt.value().clone())),
}
}
}
div {
label {
for: "username-entry",
"Username:"
//style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
}
input {
id: "username-entry",
placeholder: "username",
value: "{username.read()}",
autofocus: "true",
oninput: move |evt| username.set(evt.value().clone()),
}
}
div {
match &*last_status.read() {
None => rsx!(div {
class: "login_status",
span {"···"}
}),
Some(Ok(ServerStatus { success: false, .. })) => rsx!(div {
class: "login_status is_error",
span {
"Could not reach server"
}
}),
Some(Ok(status)) => rsx!(div {
class: "login_status",
if let (Some(users), Some(max_users)) = (status.users, status.max_users) {
span {"{users}/{max_users} Online"}
} else {
span {"Unknown Online"}
}
span {"-"}
if let Some((maj, min, pat)) = status.version {
span {"Version: {maj}.{min}.{pat}"}
} else {
span {"Unknown Version"}
}
}),
Some(Err(_)) => rsx!(div {
class: "login_status is_error",
span {
"Could not reach proxy server"
}
}),
}
div {
{bottom}
}
}
}
)
// rsx!(
// div {
// class: "{login_box}",
// h1 {
// "Mumble Web"
// }
// input {
// placeholder: "username",
// value: "{username.read()}",
// autofocus: "true",
// oninput: move |evt| username.set(evt.value().clone()),
// }
// input {
// placeholder: "server address",
// value: "{address.read()}",
// autofocus: "true",
// oninput: move |evt| address_input.set(Some(evt.value().clone())),
// }
// {bottom}
// }
// )
}
pub fn app() -> Element {
static STYLE: Asset = asset!("/assets/main.scss");
use_coroutine(|rx: UnboundedReceiver<Command>| super::network_entrypoint(rx));
let config = use_resource(|| async move {
match imp::load_config().await {
Ok(config) => config,
Err(_) => ClientConfig::default(),
}
});
rsx!(
document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Nunito:ital,wght@0,200..1000;1,200..1000&display=swap" }
document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20..48,100..700,0..1,-50..200" }
document::Link{ rel: "stylesheet", href: STYLE }
match *STATE.status.read() {
Connected => rsx!(ServerView { config }),
_ => rsx!(LoginView { config }),
}
)
}
+25 -86
View File
@@ -1,42 +1,15 @@
use crossbeam::atomic::AtomicCell;
use df::tract::{mut_slice_as_arrayviewmut, slice_as_arrayview};
use df::tract::{DfParams, DfTract, RuntimeParams};
use std::borrow::Cow;
use dioxus::prelude::{asset, manganis, Asset};
use dioxus_asset_resolver::read_asset_bytes;
use std::cell::RefCell;
use std::sync::Arc;
use tracing::{error, info};
use crate::imp::SpawnHandle;
use crate::imp;
#[cfg(not(feature = "embed-denoiser"))]
async fn denoiser_model_bytes() -> color_eyre::Result<Cow<'static, [u8]>> {
use manganis::{asset, Asset};
static DF_MODEL: Asset = asset!("/assets/DeepFilterNet3_ll_onnx.tar.gz");
let bytes = dioxus_asset_resolver::read_asset_bytes(&DF_MODEL.to_string()).await?;
Ok(Cow::Owned(bytes))
}
#[cfg(feature = "embed-denoiser")]
async fn denoiser_model_bytes() -> color_eyre::Result<Cow<'static, [u8]>> {
static DF_MODEL: &[u8] =
include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/DeepFilterNet3_ll_onnx.tar.gz"));
Ok(Cow::Borrowed(DF_MODEL))
}
// TODO: make this user configurable.
static DEFAULT_NOISE_FLOOR: f32 = 0.001;
// 200ms hold at 48kHz sample rate
static HOLD_SAMPLES_MAX: usize = 48000 / 5; // 9600 samples = 200ms
/// Indicates the transmission state after processing audio.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TransmitState {
/// Audio is above threshold, or below but within hold period - transmit normally
Transmitting,
/// Hold period expired - send this frame as terminator (end_bit = true)
Terminator,
/// Silent and not transmitting - don't send anything
Silent,
}
static DF_MODEL: Asset = asset!("/assets/DeepFilterNet3_ll_onnx.tar.gz");
enum DenoisingModelState {
Nothing,
@@ -44,7 +17,10 @@ enum DenoisingModelState {
Availible(Box<DfTract>),
}
fn with_denoising_model<O>(spawn: &SpawnHandle, func: impl FnOnce(&mut DfTract) -> O) -> Option<O> {
fn with_denoising_model<O>(
spawn: &imp::SpawnHandle,
func: impl FnOnce(&mut DfTract) -> O,
) -> Option<O> {
// Using a thread local is super gross, but DfTract is not Send (so it can never leave the current
// thread) while AudioProcessing itself might change threads whenever.
thread_local! {
@@ -57,10 +33,10 @@ fn with_denoising_model<O>(spawn: &SpawnHandle, func: impl FnOnce(&mut DfTract)
let cell_task = cell.clone();
*state = DenoisingModelState::Downloading(cell);
spawn.spawn(async move {
let model_bytes = match denoiser_model_bytes().await {
let model_bytes = match read_asset_bytes(&DF_MODEL).await {
Ok(b) => b,
Err(e) => {
error!("could not read denoising model: {e:?}");
error!("could not read denoising model from \"{DF_MODEL}\": {e:?}");
return;
}
};
@@ -98,41 +74,36 @@ fn with_denoising_model<O>(spawn: &SpawnHandle, func: impl FnOnce(&mut DfTract)
pub struct AudioProcessor {
denoise: bool,
spawn: SpawnHandle,
spawn: imp::SpawnHandle,
buffer: Vec<f32>,
noise_floor: f32,
/// Whether we were transmitting in the previous frame
was_transmitting: bool,
/// Number of samples we've been below threshold (for hold period)
hold_samples: usize,
}
impl AudioProcessor {
pub fn new(denoise: bool) -> Self {
pub fn new_plain() -> Self {
AudioProcessor {
denoise,
spawn: SpawnHandle::current(),
denoise: false,
spawn: imp::SpawnHandle::current(),
buffer: Vec::new(),
}
}
pub fn new_denoising() -> Self {
AudioProcessor {
denoise: true,
spawn: imp::SpawnHandle::current(),
buffer: Vec::new(),
noise_floor: DEFAULT_NOISE_FLOOR,
was_transmitting: false,
hold_samples: 0,
}
}
}
impl AudioProcessor {
pub fn process(
&mut self,
audio: &[f32],
channels: usize,
output: &mut Vec<f32>,
) -> TransmitState {
pub fn process(&mut self, audio: &[f32], output: &mut Vec<f32>) {
let mut include_raw = true;
if self.denoise {
with_denoising_model(&self.spawn, |df| {
include_raw = false;
self.buffer.extend(audio.iter().step_by(channels).copied());
self.buffer.extend_from_slice(audio);
output.reserve(audio.len());
let hop = df.hop_size;
@@ -159,40 +130,8 @@ impl AudioProcessor {
}
if include_raw {
output.extend(audio.iter().step_by(channels).copied());
output.extend_from_slice(audio);
}
// Calculate average amplitude for VAD
let avg: f32 = if output.is_empty() {
0.0
} else {
output.iter().map(|x| x.abs()).sum::<f32>() / output.len() as f32
};
let above_threshold = avg >= self.noise_floor;
let samples_in_frame = output.len();
let state = if above_threshold {
// Above threshold - reset hold counter and transmit
self.hold_samples = 0;
self.was_transmitting = true;
TransmitState::Transmitting
} else if self.was_transmitting && self.hold_samples < HOLD_SAMPLES_MAX {
// Below threshold but in hold period - keep transmitting
self.hold_samples += samples_in_frame;
TransmitState::Transmitting
} else if self.was_transmitting {
// Hold period expired - send terminator
self.was_transmitting = false;
self.hold_samples = 0;
TransmitState::Terminator
} else {
// Not transmitting and below threshold - stay silent
output.clear(); // Don't accumulate stale audio during silence
TransmitState::Silent
};
state
}
}
+313
View File
@@ -0,0 +1,313 @@
use crate::app::Command;
use crate::effects::{AudioProcessor, AudioProcessorSender};
use color_eyre::eyre::{eyre, Context, Error};
use cpal::traits::{DeviceTrait, HostTrait};
use dioxus::hooks::UnboundedReceiver;
use futures::io::{AsyncRead, AsyncWrite};
use mumble_protocol::control::ClientControlCodec;
use mumble_web2_common::ClientConfig;
use std::mem::replace;
use std::net::ToSocketAddrs;
use std::sync::Arc;
use std::sync::Mutex;
use tokio::net::TcpStream;
use tokio_rustls::rustls;
use tokio_rustls::rustls::client::danger::{HandshakeSignatureValid, ServerCertVerifier};
use tokio_rustls::rustls::pki_types::{CertificateDer, ServerName, UnixTime};
use tokio_rustls::rustls::ClientConfig as RlsClientConfig;
use tokio_rustls::rustls::DigitallySignedStruct;
use tokio_rustls::TlsConnector;
use tokio_util::compat::{TokioAsyncReadCompatExt as _, TokioAsyncWriteCompatExt as _};
use tracing::{error, info, instrument, warn};
pub use tokio::runtime::Handle as SpawnHandle;
pub use tokio::task::spawn;
pub use tokio::time::sleep;
pub trait ImpRead: AsyncRead + Unpin + Send + 'static {}
impl<T: AsyncRead + Unpin + Send + 'static> ImpRead for T {}
pub trait ImpWrite: AsyncWrite + Unpin + Send + 'static {}
impl<T: AsyncWrite + Unpin + Send + 'static> ImpWrite for T {}
pub struct AudioSystem {
output: cpal::Device,
input: cpal::Device,
processors: AudioProcessorSender,
recording_stream: Option<cpal::Stream>,
}
const SAMPLE_RATE: u32 = 48_000;
const PACKET_SAMPLES: u32 = 960;
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl AudioSystem {
pub fn new() -> Result<Self, Error> {
// TODO
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(AudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
pub fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
pub fn start_recording(
&mut self,
mut each: impl FnMut(Vec<u8>) + Send + 'static,
) -> Result<(), Error> {
let mut encoder =
opus::Encoder::new(SAMPLE_RATE, opus::Channels::Mono, opus::Application::Voip)?;
let mut current_processor = AudioProcessor::new_plain();
let mut output_buffer = Vec::new();
let processors = self.processors.clone();
let error_callback = move |e: cpal::StreamError| error!("error recording: {e:?}");
let data_callback = move |frame: &[f32], _: &cpal::InputCallbackInfo| {
if let Some(new_processor) = processors.take() {
current_processor = new_processor;
}
current_processor.process(frame, &mut output_buffer);
if output_buffer.len() < PACKET_SAMPLES as usize {
return;
}
let remainder = output_buffer.split_off(PACKET_SAMPLES as usize);
let frame = replace(&mut output_buffer, remainder);
match encoder.encode_vec_float(&frame, frame.len() * 2) {
Ok(buf) => {
each(buf);
}
Err(e) => {
error!("error encoding {} samples: {e:?}", frame.len());
}
}
};
match self.input.build_input_stream(
&cpal::StreamConfig {
channels: 1,
sample_rate: cpal::SampleRate(SAMPLE_RATE),
buffer_size: cpal::BufferSize::Fixed(PACKET_SAMPLES),
},
data_callback,
error_callback,
None,
) {
Ok(stream) => {
self.recording_stream = Some(stream);
Ok(())
}
Err(err) => {
self.recording_stream = None;
Err(err.into())
}
}
}
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
let buffer = Arc::new(Mutex::new(dasp_ring_buffer::Bounded::from_raw_parts(
0,
0,
vec![
0;
SAMPLE_RATE as usize/4 // 250ms of buffer
],
)));
let decoder = opus::Decoder::new(SAMPLE_RATE, opus::Channels::Mono)?;
let stream = {
let buffer = buffer.clone();
self.output.build_output_stream(
&cpal::StreamConfig {
channels: 1,
sample_rate: cpal::SampleRate(SAMPLE_RATE),
buffer_size: cpal::BufferSize::Fixed(480), // 10ms playback delay
},
move |frame, info| {
let mut buffer = buffer.lock().unwrap();
for x in frame.iter_mut() {
match buffer.pop() {
Some(y) => {
*x = y;
}
None => {
*x = 0;
}
}
}
},
move |err| error!("could not create output stream {err:?}"),
None,
)?
};
Ok(AudioPlayer {
decoder,
stream,
buffer,
tmp: vec![0; 2400],
})
}
}
pub struct AudioPlayer {
decoder: opus::Decoder,
stream: cpal::Stream,
buffer: Buffer,
tmp: Vec<i16>,
}
impl AudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) {
let len = loop {
match self.decoder.decode(payload, &mut self.tmp, false) {
Ok(l) => break l,
Err(e) => {
error!("opus decode error {e:?}");
return;
}
}
};
let mut buffer = self.buffer.lock().unwrap();
let mut overrun = 0;
for x in &self.tmp[..len] {
if let Some(_) = buffer.push(*x) {
overrun += 1;
}
}
if overrun > 0 {
warn!("playback overrun by {overrun} samples");
}
}
}
#[derive(Debug)]
struct NoCertificateVerification;
impl ServerCertVerifier for NoCertificateVerification {
fn verify_server_cert(
&self,
_end_entity: &CertificateDer<'_>,
_intermediates: &[CertificateDer<'_>],
_server_name: &ServerName<'_>,
_ocsp: &[u8],
_now: UnixTime,
) -> Result<rustls::client::danger::ServerCertVerified, rustls::Error> {
Ok(rustls::client::danger::ServerCertVerified::assertion())
}
fn verify_tls12_signature(
&self,
_message: &[u8],
_cert: &CertificateDer<'_>,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn verify_tls13_signature(
&self,
_message: &[u8],
_cert: &CertificateDer<'_>,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn supported_verify_schemes(&self) -> Vec<rustls::SignatureScheme> {
vec![
rustls::SignatureScheme::RSA_PKCS1_SHA1,
rustls::SignatureScheme::ECDSA_SHA1_Legacy,
rustls::SignatureScheme::RSA_PKCS1_SHA256,
rustls::SignatureScheme::ECDSA_NISTP256_SHA256,
rustls::SignatureScheme::RSA_PKCS1_SHA384,
rustls::SignatureScheme::ECDSA_NISTP384_SHA384,
rustls::SignatureScheme::RSA_PKCS1_SHA512,
rustls::SignatureScheme::ECDSA_NISTP521_SHA512,
rustls::SignatureScheme::RSA_PSS_SHA256,
rustls::SignatureScheme::RSA_PSS_SHA384,
rustls::SignatureScheme::RSA_PSS_SHA512,
rustls::SignatureScheme::ED25519,
rustls::SignatureScheme::ED448,
]
}
}
#[instrument]
pub async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> Result<(), Error> {
info!("connecting");
let config = RlsClientConfig::builder()
.dangerous()
.with_custom_certificate_verifier(Arc::new(NoCertificateVerification))
.with_no_client_auth();
let connector = TlsConnector::from(Arc::new(config));
let addr = format!("{}:{}", address, 64738)
.to_socket_addrs()?
.next()
.unwrap();
let server_tcp = TcpStream::connect(addr).await?;
let server_stream = connector
//.connect("127.0.0.1".try_into()?, server_tcp)
.connect(address.try_into()?, server_tcp)
.await?;
let (read_server, write_server) = tokio::io::split(server_stream);
let read_codec = ClientControlCodec::new();
let write_codec = ClientControlCodec::new();
let reader = asynchronous_codec::FramedRead::new(read_server.compat(), read_codec);
let writer = asynchronous_codec::FramedWrite::new(write_server.compat_write(), write_codec);
crate::network_loop(username, event_rx, reader, writer).await
}
pub fn set_default_username(username: &str) -> Option<()> {
None
}
pub fn load_username() -> Option<String> {
return None;
}
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
status_url: None,
cert_hash: None,
any_server: true,
})
}
pub fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
+11
View File
@@ -0,0 +1,11 @@
#[cfg(feature = "web")]
mod web;
#[cfg(feature = "desktop")]
mod desktop;
#[cfg(all(feature = "web", not(feature = "desktop")))]
pub use web::*;
#[cfg(feature = "desktop")]
pub use desktop::*;
+149 -260
View File
@@ -1,24 +1,21 @@
use crate::app::{Command, SharedState};
use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
use crate::Reactivity;
use crate::app::Command;
use crate::effects::{AudioProcessor, AudioProcessorSender};
use color_eyre::eyre::{bail, eyre, Error};
use crossbeam::atomic::AtomicCell;
use futures_channel::mpsc::UnboundedReceiver;
use dioxus::prelude::*;
use futures::{AsyncRead, AsyncWrite};
use gloo_timers::future::TimeoutFuture;
use js_sys::Float32Array;
use manganis::asset;
use mumble_protocol::control::ClientControlCodec;
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use mumble_web2_common::ClientConfig;
use reqwest::Url;
use std::collections::HashMap;
use std::future::Future;
use std::sync::Arc;
use std::time::Duration;
use tracing::level_filters::LevelFilter;
use tracing::{debug, error, info, instrument};
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture;
use web_sys::js_sys::{Promise, Reflect, Uint8Array};
use web_sys::AudioContext;
use web_sys::AudioContextOptions;
use web_sys::AudioData;
use web_sys::AudioDecoder;
@@ -31,108 +28,27 @@ use web_sys::AudioWorkletNode;
use web_sys::EncodedAudioChunk;
use web_sys::EncodedAudioChunkInit;
use web_sys::EncodedAudioChunkType;
use web_sys::MediaStream;
use web_sys::MediaStreamConstraints;
use web_sys::MediaStreamTrackGenerator;
use web_sys::MediaStreamTrackGeneratorInit;
use web_sys::MessageEvent;
use web_sys::WebTransport;
use web_sys::WebTransportBidirectionalStream;
use web_sys::WebTransportOptions;
use web_sys::WorkletOptions;
use web_sys::{console, window};
use web_sys::{AudioContext, AudioDataCopyToOptions};
#[allow(unused)]
pub use wasm_bindgen_futures::spawn_local as spawn;
#[allow(unused)]
#[derive(Clone)]
pub struct SpawnHandle;
pub trait ImpRead: AsyncRead + Unpin + 'static {}
impl<T: AsyncRead + Unpin + 'static> ImpRead for T {}
impl SpawnHandle {
pub fn spawn<F>(&self, future: F)
where
F: Future<Output = ()> + 'static,
{
wasm_bindgen_futures::spawn_local(future);
}
pub trait ImpWrite: AsyncWrite + Unpin + 'static {}
impl<T: AsyncWrite + Unpin + 'static> ImpWrite for T {}
pub fn current() -> Self {
SpawnHandle
}
}
/// Web platform implementation using WebTransport and Web Audio API.
pub struct WebPlatform;
impl super::PlatformInterface for WebPlatform {
type AudioSystem = WebAudioSystem;
type ConfigSystem = WebConfigSystem;
fn init_logging() {
// copied from tracing_web example usage
use tracing_subscriber::fmt::format::Pretty;
use tracing_subscriber::prelude::*;
use tracing_web::{performance_layer, MakeWebConsoleWriter};
let fmt_layer = tracing_subscriber::fmt::layer()
.with_ansi(false) // Only partially supported across browsers
.without_time() // std::time is not available in browsers
.with_writer(MakeWebConsoleWriter::new()) // write events to the console
.with_filter(LevelFilter::DEBUG);
let perf_layer = performance_layer().with_details_from_fields(Pretty::default());
tracing_subscriber::registry()
.with(fmt_layer)
.with(perf_layer)
.init();
info!("logging initiated");
}
fn request_permissions() {
// No-op on web
}
async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
let overrides = match option_env!("MUMBLE_WEB2_PROXY_OVERRIDES_URL") {
Some(url) => Url::parse(url)?,
None => absolute_url("overrides")?,
};
info!("loading config from {}", overrides);
let config = reqwest::get(overrides)
.await?
.json::<ProxyOverrides>()
.await?;
Ok(config)
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
overrides: &ProxyOverrides,
state: SharedState<impl Reactivity>,
) -> Result<(), Error> {
network_connect(address, username, event_rx, overrides, state).await
}
async fn get_status(
client: &reqwest::Client,
_address: &str,
) -> color_eyre::Result<ServerStatus> {
Ok(client
.get(absolute_url("status")?)
.send()
.await?
.json::<ServerStatus>()
.await?)
}
async fn sleep(duration: Duration) {
TimeoutFuture::new(duration.as_millis() as u32).await;
}
pub async fn sleep(d: Duration) {
TimeoutFuture::new(d.as_millis() as u32).await
}
trait ResultExt<T> {
@@ -157,59 +73,25 @@ impl<T> ResultExt<T> for Result<T, JsError> {
}
}
pub struct WebAudioSystem {
pub struct AudioSystem {
webctx: AudioContext,
processors: AudioProcessorSender,
}
async fn attach_worklet(audio_context: &AudioContext, worklet_url: &str) -> Result<(), Error> {
// Create worklets to process mic and speaker audio
// Speaker audio processing worklet only required on
// browsers that don't support MediaStreamTrackGenerator
let options = WorkletOptions::new();
Reflect::set(
&options,
&"processorOptions".into(),
&wasm_bindgen::module(),
)
.ey()?;
info!("loading mic worklet from {worklet_url:?}");
audio_context
.audio_worklet()
.ey()?
.add_module_with_options(worklet_url, &options)
.ey()?
.into_future()
.await
.ey()?;
Ok(())
}
impl super::AudioSystemInterface for WebAudioSystem {
type AudioPlayer = WebAudioPlayer;
async fn new() -> Result<Self, Error> {
impl AudioSystem {
pub fn new() -> Result<Self, Error> {
// Create MediaStreams to playback decoded audio
// The audio context is used to reproduce audio.
let webctx = configure_audio_context();
attach_worklet(
&webctx,
&asset!("/assets/rust_audio_worklet.js").to_string(),
)
.await?;
let processors = AudioProcessorSender::default();
Ok(WebAudioSystem { webctx, processors })
Ok(AudioSystem { webctx, processors })
}
fn set_processor(&self, processor: AudioProcessor) {
pub fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn start_recording(&mut self, each: impl FnMut(Vec<u8>, bool) + 'static) -> Result<(), Error> {
pub fn start_recording(&mut self, each: impl FnMut(Vec<u8>) + 'static) -> Result<(), Error> {
let audio_context_worklet = self.webctx.clone();
let processors = self.processors.clone();
spawn(async move {
@@ -221,11 +103,19 @@ impl super::AudioSystemInterface for WebAudioSystem {
Ok(())
}
fn create_player(&mut self) -> Result<WebAudioPlayer, Error> {
let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
let audio_stream_generator =
MediaStreamTrackGenerator::new(&MediaStreamTrackGeneratorInit::new("audio")).ey()?;
// Connect worklet to destination
sink_node
// Create MediaStream from MediaStreamTrackGenerator
let js_tracks = web_sys::js_sys::Array::new();
js_tracks.push(&audio_stream_generator);
let media_stream = MediaStream::new_with_tracks(&js_tracks).ey()?;
// Create MediaStreamAudioSourceNode
let audio_source = self.webctx.create_media_stream_source(&media_stream).ey()?;
// Connect output of audio_source to audio_context (browser audio)
audio_source
.connect_with_audio_node(&self.webctx.destination())
.ey()?;
@@ -234,31 +124,28 @@ impl super::AudioSystemInterface for WebAudioSystem {
error!("error decoding audio {:?}", e);
}) as Box<dyn FnMut(JsValue)>);
let sink_port = sink_node.port().ey()?;
// This knows what MediaStreamTrackGenerator to use as it closes around it
let output = Closure::wrap(Box::new(move |audio_data: AudioData| {
// Extract planar PCM from AudioData into an ArrayBuffer or Float32Array
// Here we assume f32 samples, 1 channel for brevity.
let number_of_frames = audio_data.number_of_frames();
let js_buffer = Float32Array::new_with_length(number_of_frames);
let audio_data_copy_to_options = &AudioDataCopyToOptions::new(0);
audio_data_copy_to_options.set_format(web_sys::AudioSampleFormat::F32);
if let Err(e) = audio_data
.copy_to_with_buffer_source(&js_buffer.buffer(), &audio_data_copy_to_options)
{
error!("could not copy audio data to array {:?}", e);
let writable = audio_stream_generator.writable();
if writable.locked() {
return;
}
if let Err(e) = writable.get_writer().map(|writer| {
spawn(async move {
if let Err(e) = JsFuture::from(writer.ready()).await.ey() {
error!("write chunk ready error {:?}", e);
}
if let Err(e) = JsFuture::from(writer.write_with_chunk(&audio_data))
.await
.ey()
{
error!("write chunk error {:?}", e);
};
writer.release_lock();
});
}) {
error!("error writing audio data {:?}", e);
}
// Post to the worklet; include sampleRate and channel count if needed.
let msg = js_sys::Object::new();
js_sys::Reflect::set(&msg, &"samples".into(), &js_buffer).unwrap();
sink_port.post_message(&msg).unwrap();
audio_data.close();
}) as Box<dyn FnMut(AudioData)>);
let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new(
@@ -274,14 +161,14 @@ impl super::AudioSystemInterface for WebAudioSystem {
decoder_error.forget();
output.forget();
Ok(WebAudioPlayer(audio_decoder))
Ok(AudioPlayer(audio_decoder))
}
}
pub struct WebAudioPlayer(AudioDecoder);
pub struct AudioPlayer(AudioDecoder);
impl super::AudioPlayerInterface for WebAudioPlayer {
fn play_opus(&mut self, payload: &[u8]) {
impl AudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) {
let js_audio_payload = Uint8Array::from(payload);
let _ = self.0.decode(
&EncodedAudioChunk::new(&EncodedAudioChunkInit::new(
@@ -313,26 +200,22 @@ impl PromiseExt for Promise {
}
}
fn process_audio(frame: &JsValue, processor: &mut AudioProcessor) -> TransmitState {
fn process_audio(frame: &JsValue, processor: &mut AudioProcessor) {
let Ok(samples) = Reflect::get(&frame, &"data".into()) else {
return TransmitState::Silent;
return;
};
let Ok(samples) = samples.dyn_into::<Float32Array>() else {
return TransmitState::Silent;
return;
};
let input = samples.to_vec();
let mut output = Vec::with_capacity(input.len());
let state = processor.process(&input, 1, &mut output);
if !output.is_empty() {
samples.copy_from(&output);
}
state
processor.process(&input, &mut output);
samples.copy_from(&output);
}
async fn run_encoder_worklet(
audio_context: &AudioContext,
mut each: impl FnMut(Vec<u8>, bool) + 'static,
mut each: impl FnMut(Vec<u8>) + 'static,
processors: AudioProcessorSender,
) -> Result<AudioWorkletNode, Error> {
let constraints = MediaStreamConstraints::new();
@@ -351,25 +234,37 @@ async fn run_encoder_worklet(
.map_err(|e| JsError::new(&format!("not a stream: {e:?}")))
.ey()?;
let options = WorkletOptions::new();
Reflect::set(
&options,
&"processorOptions".into(),
&wasm_bindgen::module(),
)
.ey()?;
let module = asset!("assets/rust_mic_worklet.js").to_string();
info!("loading mic worklet from {module:?}");
audio_context
.audio_worklet()
.ey()?
.add_module_with_options(&module, &options)
.ey()?
.into_future()
.await
.ey()?;
let source = audio_context.create_media_stream_source(&stream).ey()?;
let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?;
let encoder_error: Closure<dyn FnMut(JsValue)> =
Closure::new(|e| error!("error encoding audio {:?}", e));
// Shared state to signal terminator between onmessage and output closures
// The output closure runs asynchronously after encoding completes
let pending_terminator = Arc::new(AtomicCell::new(false));
let pending_terminator_output = pending_terminator.clone();
// This knows what MediaStreamTrackGenerator to use as it closes around it
let output: Closure<dyn FnMut(EncodedAudioChunk)> =
Closure::new(move |audio_data: EncodedAudioChunk| {
let mut array = vec![0u8; audio_data.byte_length() as usize];
audio_data.copy_to_with_u8_slice(&mut array);
// Check if this frame was marked as a terminator
let is_terminator = pending_terminator_output.swap(false);
each(array, is_terminator);
each(array);
});
let audio_encoder = AudioEncoder::new(&AudioEncoderInit::new(
@@ -389,26 +284,15 @@ async fn run_encoder_worklet(
audio_encoder.configure(&encoder_config);
info!("created audio encoder");
let mut current_processor = AudioProcessor::new(false);
let mut current_processor = AudioProcessor::new_plain();
let onmessage: Closure<dyn FnMut(MessageEvent)> = Closure::new(move |event: MessageEvent| {
if let Some(new_processor) = processors.take() {
current_processor = new_processor;
}
let frame = event.data();
let state = process_audio(&frame, &mut current_processor);
process_audio(&frame, &mut current_processor);
match state {
TransmitState::Silent => {
// Don't encode or send anything
return;
}
TransmitState::Transmitting => (), // Normal transmission
TransmitState::Terminator => {
// Mark this as a terminator before encoding
pending_terminator.store(true);
}
}
match AudioData::new(frame.unchecked_ref()) {
Ok(data) => {
let _ = audio_encoder.encode(&data);
@@ -442,8 +326,7 @@ pub async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
overrides: &ProxyOverrides,
state: SharedState,
gui_config: &ClientConfig,
) -> Result<(), Error> {
info!("connecting");
@@ -456,7 +339,7 @@ pub async fn network_connect(
)
.ey()?;
if let Some(server_hash) = &overrides.cert_hash {
if let Some(server_hash) = &gui_config.cert_hash {
let hash = web_sys::js_sys::Uint8Array::from(server_hash.as_slice());
web_sys::js_sys::Reflect::set(&object, &"value".into(), &hash).ey()?;
}
@@ -502,9 +385,24 @@ pub async fn network_connect(
let writer =
asynchronous_codec::FramedWrite::new(wasm_stream_writable.into_async_write(), write_codec);
let (outgoing_send, outgoing_recv) = futures_channel::mpsc::unbounded();
spawn(crate::sender_loop(outgoing_recv, writer));
crate::network_loop(username, state, event_rx, outgoing_send, reader).await
crate::network_loop(username, event_rx, reader, writer).await
}
pub fn set_default_username(username: &str) -> Option<()> {
web_sys::window()?
.local_storage()
.ok()??
.set_item("username", username)
.ok()
}
pub fn load_username() -> Option<String> {
web_sys::window()
.unwrap()
.local_storage()
.ok()??
.get_item("username")
.ok()?
}
pub fn absolute_url(path: &str) -> Result<Url, Error> {
@@ -513,63 +411,54 @@ pub fn absolute_url(path: &str) -> Result<Url, Error> {
Ok(Url::parse(&location.href().ey()?)?.join(path)?)
}
#[derive(Clone, PartialEq)]
pub struct WebConfigSystem {}
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
let config_url = match option_env!("MUMBLE_WEB2_GUI_CONFIG_URL") {
Some(url) => Url::parse(url)?,
None => absolute_url("config")?,
};
info!("loading config from {}", config_url);
impl super::ConfigSystemInterface for WebConfigSystem {
fn new() -> Result<Self, Error> {
return Ok(WebConfigSystem {});
let config = reqwest::get(config_url)
.await?
.json::<ClientConfig>()
.await?;
Ok(config)
}
pub fn init_logging() {
// copied from tracing_web example usage
use tracing_subscriber::fmt::format::Pretty;
use tracing_subscriber::prelude::*;
use tracing_web::{performance_layer, MakeWebConsoleWriter};
let fmt_layer = tracing_subscriber::fmt::layer()
.with_ansi(false) // Only partially supported across browsers
.without_time() // std::time is not available in browsers
.with_writer(MakeWebConsoleWriter::new()) // write events to the console
.with_filter(LevelFilter::DEBUG);
let perf_layer = performance_layer().with_details_from_fields(Pretty::default());
tracing_subscriber::registry()
.with(fmt_layer)
.with(perf_layer)
.init();
info!("logging initiated");
}
pub struct SpawnHandle;
impl SpawnHandle {
pub fn current() -> Self {
SpawnHandle
}
fn config_get<T>(&self, key: &str) -> Option<T>
pub fn spawn<F>(&self, future: F)
where
T: serde::de::DeserializeOwned,
F: Future<Output = ()> + 'static,
{
// Get Storage
let storage = web_sys::window()?.local_storage().ok()??;
// Try localStorage first
if let Ok(Some(raw)) = storage.get_item(key) {
if let Ok(parsed) = serde_json::from_str::<T>(&raw) {
return Some(parsed);
}
}
// Fallback to default if deserialization fails or key missing
let default_value = config_get_default(key)?;
serde_json::from_value::<T>(default_value).ok()
}
fn config_set<T>(&self, key: &str, value: &T)
where
T: serde::Serialize,
{
let storage = window()
.and_then(|w| w.local_storage().ok().flatten())
.expect("localStorage not available");
let json_value =
serde_json::to_string(value).expect("failed to serialize config value to JSON string");
storage
.set_item(key, &json_value)
.expect("failed to write to localStorage");
spawn(future);
}
}
fn config_get_default(key: &str) -> Option<serde_json::Value> {
let default_config = platform_default_config();
default_config
.get(key)
.cloned()
.or(super::global_default_config().get(key).cloned())
}
fn platform_default_config() -> HashMap<String, serde_json::Value> {
serde_json::json!({})
.as_object()
.unwrap()
.clone()
.into_iter()
.collect()
}
+103 -87
View File
@@ -1,19 +1,18 @@
use crate::msghtml::process_message_html;
use crate::AudioSettings;
use crate::Chat;
use crate::Command;
use crate::ConnectionState;
use crate::Reactivity;
use app::Chat;
use app::Command;
use app::ConnectionState;
use app::STATE;
use asynchronous_codec::FramedRead;
use asynchronous_codec::FramedWrite;
use color_eyre::eyre::{bail, Error};
use dioxus::prelude::*;
use futures::select;
use futures::AsyncRead;
use futures::AsyncWrite;
use futures::FutureExt as _;
use futures::SinkExt as _;
use futures::StreamExt as _;
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
use futures_channel::mpsc::UnboundedSender;
pub use imp::spawn;
use msghtml::process_message_html;
use mumble_protocol::control::msgs;
use mumble_protocol::control::ControlCodec;
use mumble_protocol::control::ControlPacket;
@@ -21,24 +20,24 @@ use mumble_protocol::voice::VoicePacket;
use mumble_protocol::voice::VoicePacketPayload;
use mumble_protocol::Clientbound;
use mumble_protocol::Serverbound;
use mumble_web2_common::ClientConfig;
use once_cell::sync::Lazy;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::time::Duration;
use tracing::debug;
use tracing::error;
use tracing::info;
use crate::app::SharedState;
use crate::app::State;
use crate::effects::AudioProcessor;
use crate::imp::{
spawn, AudioPlayer, AudioPlayerInterface as _, AudioSystem, AudioSystemInterface as _,
Platform, PlatformInterface as _,
};
use crate::imp::AudioSystem;
pub async fn network_entrypoint<X: Reactivity>(
mut event_rx: UnboundedReceiver<Command>,
state: SharedState<X>,
) {
pub mod app;
mod effects;
pub mod imp;
mod msghtml;
pub async fn network_entrypoint(mut event_rx: UnboundedReceiver<Command>) {
loop {
let Some(Command::Connect {
address,
@@ -49,43 +48,35 @@ pub async fn network_entrypoint<X: Reactivity>(
panic!("did not receive connect command")
};
*X::write(&state.server) = Default::default();
*X::write(&state.status) = ConnectionState::Connecting;
if let Err(error) =
Platform::network_connect(address, username, &mut event_rx, &config, state.clone())
.await
{
*STATE.server.write() = Default::default();
*STATE.status.write() = ConnectionState::Connecting;
if let Err(error) = imp::network_connect(address, username, &mut event_rx, &config).await {
error!("could not connect {:?}", error);
*X::write(&state.status) = ConnectionState::Failed(error.to_string());
*STATE.status.write() = ConnectionState::Failed(error.to_string());
} else {
*X::write(&state.status) = ConnectionState::Disconnected;
*STATE.status.write() = ConnectionState::Disconnected;
}
}
}
pub(crate) async fn sender_loop<W: AsyncWrite + Unpin + 'static>(
mut outgoing: UnboundedReceiver<ControlPacket<Serverbound>>,
mut writer: FramedWrite<W, ControlCodec<Serverbound, Clientbound>>,
) {
while let Some(msg) = outgoing.next().await {
if !matches!(msg, ControlPacket::Ping(_) | ControlPacket::UDPTunnel(_)) {
info!("sending packet {:#?}", msg);
}
if let Err(e) = writer.send(msg).await {
error!("error sending packet {:?}", e);
break;
}
}
}
pub(crate) async fn network_loop<R: AsyncRead + Unpin + 'static, X: Reactivity>(
pub async fn network_loop<R: imp::ImpRead, W: imp::ImpWrite>(
username: String,
state: SharedState<X>,
event_rx: &mut UnboundedReceiver<Command>,
mut outgoing: UnboundedSender<ControlPacket<Serverbound>>,
mut reader: FramedRead<R, ControlCodec<Serverbound, Clientbound>>,
mut writer: FramedWrite<W, ControlCodec<Serverbound, Clientbound>>,
) -> Result<(), Error> {
let audio_settings = X::read(&state.audio).clone();
let (mut send_chan, mut writer_recv_chan) = futures_channel::mpsc::unbounded();
spawn(async move {
while let Some(msg) = writer_recv_chan.next().await {
if !matches!(msg, ControlPacket::Ping(_) | ControlPacket::UDPTunnel(_)) {
info!("sending packet {:#?}", msg);
}
if let Err(e) = writer.send(msg).await {
error!("error sending packet {:?}", e);
break;
}
}
});
// Get version packet
let version = match reader.next().await {
@@ -100,47 +91,44 @@ pub(crate) async fn network_loop<R: AsyncRead + Unpin + 'static, X: Reactivity>(
msg.set_version(0x000010204);
msg.set_release(format!("{} {}", "mumbleweb2", "6.9.0"));
//msg.set_os("Chrome".to_string());
outgoing.send(msg.into()).await.unwrap();
send_chan.send(msg.into()).await.unwrap();
// Send authenticate packet
let mut msg = msgs::Authenticate::new();
msg.set_username(username);
msg.set_opus(true);
outgoing.send(msg.into()).await.unwrap();
send_chan.send(msg.into()).await.unwrap();
// Spawn worker to send pings
{
let mut send_chan = outgoing.clone();
let mut send_chan = send_chan.clone();
spawn(async move {
loop {
if let Err(_) = send_chan.send(msgs::Ping::new().into()).await {
break;
}
Platform::sleep(Duration::from_millis(3000)).await;
imp::sleep(Duration::from_millis(3000)).await;
}
});
}
let mut audio = AudioSystem::new().await?;
audio.set_processor(AudioProcessor::new(audio_settings.denoise));
let mut audio = imp::AudioSystem::new()?;
{
let send_chan = outgoing.clone();
let send_chan = send_chan.clone();
let mut sequence_num = 0;
if let Err(err) = audio.start_recording(move |opus_frame, is_terminator| {
audio.start_recording(move |opus_frame| {
let _ =
send_chan.unbounded_send(ControlPacket::UDPTunnel(Box::new(VoicePacket::Audio {
_dst: std::marker::PhantomData,
target: 0,
session_id: (),
seq_num: sequence_num,
payload: VoicePacketPayload::Opus(opus_frame.into(), is_terminator),
payload: VoicePacketPayload::Opus(opus_frame.into(), false),
position_info: None,
})));
sequence_num = sequence_num.wrapping_add(2);
}) {
error!("could not begin recording: {err:?}")
}
});
}
// Create map of session_id -> AudioDecoder
@@ -158,7 +146,7 @@ pub(crate) async fn network_loop<R: AsyncRead + Unpin + 'static, X: Reactivity>(
if !matches!(msg, ControlPacket::UDPTunnel(_) | ControlPacket::Ping(_)) {
info!("receiving packet {:#?}", msg);
}
let res = accept_packet(msg, &mut audio, &mut decoder_map, &state);
let res = accept_packet(msg, &mut audio, &mut decoder_map);
if let Err(err) = res {
error!("error accepting packet {:?}", err)
}
@@ -177,7 +165,7 @@ pub(crate) async fn network_loop<R: AsyncRead + Unpin + 'static, X: Reactivity>(
match command {
Some(Command::Disconnect) => break,
Some(command) => {
let res = accept_command(command, &mut outgoing, &mut audio, &state);
let res = accept_command(command, &mut send_chan, &mut audio);
if let Err(err) = res {
info!("error accepting command {:?}", err)
}
@@ -187,19 +175,18 @@ pub(crate) async fn network_loop<R: AsyncRead + Unpin + 'static, X: Reactivity>(
}
}
}
let _ = outgoing.close();
let _ = send_chan.close();
Ok(())
}
fn accept_command<X: Reactivity>(
fn accept_command(
command: Command,
send_chan: &mut UnboundedSender<ControlPacket<mumble_protocol::Serverbound>>,
audio: &mut AudioSystem,
state: &State<X>,
) -> Result<(), Error> {
use Command::*;
let Some(session) = X::read(&state.server).session else {
let Some(session) = STATE.server.read().session else {
bail!("no session id")
};
@@ -222,7 +209,7 @@ fn accept_command<X: Reactivity>(
};
{
let mut server = X::write(&state.server);
let mut server = STATE.server.write();
let Some(me) = server.session else {
bail!("not signed in with a session id")
};
@@ -263,7 +250,7 @@ fn accept_command<X: Reactivity>(
};
{
let mut server = X::write(&state.server);
let mut server = STATE.server.write();
let Some(me) = server.session else {
bail!("not signed in with a session id")
};
@@ -298,19 +285,22 @@ fn accept_command<X: Reactivity>(
let _ = send_chan.unbounded_send(u.into());
}
Connect { .. } | Disconnect => (),
UpdateAudioSettings(AudioSettings { denoise }) => {
audio.set_processor(AudioProcessor::new(denoise));
UpdateMicEffects { denoise } => {
if denoise {
audio.set_processor(AudioProcessor::new_denoising());
} else {
audio.set_processor(AudioProcessor::new_plain());
}
}
}
Ok(())
}
fn accept_packet<X: Reactivity>(
fn accept_packet(
msg: ControlPacket<mumble_protocol::Clientbound>,
audio_context: &mut AudioSystem,
player_map: &mut HashMap<u32, AudioPlayer>,
state: &State<X>,
audio_context: &mut imp::AudioSystem,
player_map: &mut HashMap<u32, imp::AudioPlayer>,
) -> Result<(), Error> {
match msg {
ControlPacket::UDPTunnel(u) => {
@@ -347,15 +337,45 @@ fn accept_packet<X: Reactivity>(
}
}
ControlPacket::ChannelState(u) => {
let mut server = X::write(&state.server);
server.channels_state.update_from_channel_state(&u);
let mut server = STATE.server.write();
let id = u.get_channel_id();
let state = server.channels.entry(id).or_default();
let new_parent = if u.has_parent() {
if let Some(parent) = state.parent.and_then(|p| server.channels.get_mut(&p)) {
parent.children.remove(&id);
}
let parent_id = u.get_parent();
let parent = server.channels.entry(parent_id).or_default();
if u.has_position() && u.get_position() as usize <= parent.children.len() {
// TODO: what if positions are received out of order? we need to sort afterwards?
parent.children.insert_before(u.get_position() as usize, id);
} else {
parent.children.insert(id);
}
Some(parent_id)
} else {
None
};
let state = server.channels.entry(id).or_default();
state.parent = new_parent;
if u.has_name() {
state.name = u.get_name().to_string();
}
}
ControlPacket::ChannelRemove(u) => {
let mut server = X::write(&state.server);
server.channels_state.update_from_channel_remove(&u);
let mut server = STATE.server.write();
let id = u.get_channel_id();
if let Some(channel) = server.channels.remove(&id) {
if let Some(parent) = channel.parent.and_then(|p| server.channels.get_mut(&p)) {
parent.children.remove(&id);
}
}
}
ControlPacket::UserState(u) => {
let mut server = X::write(&state.server);
let mut server = STATE.server.write();
let server = &mut *server;
let id = u.get_session();
@@ -364,13 +384,12 @@ fn accept_packet<X: Reactivity>(
let state = state_entry.or_default();
// the server might now send a channel_id if the user is in channel=0
if u.has_channel_id() || new {
if let Some(parent) = server.channels_state.channels.get_mut(&state.channel) {
if let Some(parent) = server.channels.get_mut(&state.channel) {
parent.users.remove(&id);
}
let channel_id = u.get_channel_id();
server
.channels_state
.channels
.entry(channel_id)
.or_default()
@@ -388,9 +407,6 @@ fn accept_packet<X: Reactivity>(
if u.has_deaf() {
state.deaf = u.get_deaf();
}
if u.has_suppress() {
state.suppress = u.get_suppress();
}
if u.has_self_mute() {
state.self_mute = u.get_self_mute();
}
@@ -399,16 +415,16 @@ fn accept_packet<X: Reactivity>(
}
}
ControlPacket::UserRemove(u) => {
let mut server = X::write(&state.server);
let mut server = STATE.server.write();
let id = u.get_session();
if let Some(state) = server.users.remove(&id) {
if let Some(parent) = server.channels_state.channels.get_mut(&state.channel) {
if let Some(parent) = server.channels.get_mut(&state.channel) {
parent.users.remove(&id);
}
}
}
ControlPacket::TextMessage(u) => {
let mut server = X::write(&state.server);
let mut server = STATE.server.write();
if u.has_message() {
let text = u.get_message().to_string();
server.chat.push(Chat {
@@ -423,8 +439,8 @@ fn accept_packet<X: Reactivity>(
}
}
ControlPacket::ServerSync(u) => {
*X::write(&state.status) = ConnectionState::Connected;
let mut server = X::write(&state.server);
*STATE.status.write() = ConnectionState::Connected;
let mut server = STATE.server.write();
if u.has_welcome_text() {
let text = u.get_welcome_text().to_string();
server.chat.push(Chat {
+3 -758
View File
@@ -1,761 +1,6 @@
#![allow(non_snake_case)]
use dioxus::prelude::*;
use mumble_web2_client::{
network_entrypoint, reqwest, AudioSettings, ChannelId, Command, ConfigSystem,
ConfigSystemInterface as _, ConnectionState, Platform, PlatformInterface as _, ServerState,
UserId, UserState,
};
use mumble_web2_common::{ProxyOverrides, ServerStatus};
use std::collections::{HashMap, HashSet};
use std::{fmt, sync::Arc};
use Command::*;
use ConnectionState::*;
pub struct DioxusReactivity;
impl mumble_web2_client::Reactivity for DioxusReactivity {
type Signal<T> = Signal<T>;
fn new<T: 'static>(value: T) -> Signal<T> {
Signal::new(value)
}
fn read<T: 'static>(signal: &Signal<T>) -> impl std::ops::Deref<Target = T> {
signal.read_unchecked()
}
fn write<T: 'static>(signal: &Signal<T>) -> impl std::ops::DerefMut<Target = T> {
signal.write_unchecked()
}
}
pub type SharedState = mumble_web2_client::SharedState<DioxusReactivity>;
pub type State = mumble_web2_client::State<DioxusReactivity>;
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum UserIcon {
Normal,
Muted,
Deafened,
Suppressed,
None,
}
impl UserIcon {
pub fn icon(user: &UserState) -> UserIcon {
if user.deaf || user.self_deaf {
UserIcon::Deafened
} else if user.mute || user.self_mute {
UserIcon::Muted
} else if user.suppress {
UserIcon::Suppressed
} else {
UserIcon::Normal
}
}
pub fn url(self) -> Option<Asset> {
// speaker from https://www.svgrepo.com/collection/ikono-bold-line-icons/
// mic from https://www.svgrepo.com/collection/hashicorp-line-interface-icons/
use UserIcon::*;
Some(match self {
Normal => asset!("assets/mic-svgrepo-com.svg"),
Muted | Suppressed => asset!("assets/mic-off-svgrepo-com.svg"),
Deafened => asset!("assets/speaker-muted-svgrepo-com.svg"),
None => return Option::None,
})
}
}
#[component]
pub fn UserPill(name: String, icon: UserIcon, isself: bool) -> Element {
let color = match icon {
UserIcon::Normal => "var(--accent-normal)",
UserIcon::Muted => "var(--accent-muted)",
UserIcon::Suppressed | UserIcon::Deafened => "var(--accent-deafened)",
UserIcon::None => "var(--accent-normal)",
};
rsx!(
div {
class: match isself { true => "userpil is_self", false => "userpil" },
style: "background-color: {color}",
{ icon.url().map(|url| rsx!(img { src: url })) }
"\u{00A0}{name}\u{00A0}"
}
)
}
#[component]
pub fn User(id: UserId) -> Element {
let state = use_context::<SharedState>();
let server = state.server.read();
match server.users.get(&id) {
Some(state) => rsx!(UserPill {
name: state.name.clone(),
icon: UserIcon::icon(state),
isself: server.session.unwrap() == id,
}),
None => rsx!(UserPill {
name: format!("unknown user ({id})"),
icon: UserIcon::None,
isself: false,
}),
}
}
#[component]
pub fn Channel(id: ChannelId) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let state = use_context::<SharedState>();
let server = state.server.read();
let user = server.session.unwrap();
let Some(state) = server.channels_state.channels.get(&id) else {
return rsx!("missing channel {id}");
};
let mut open = use_signal(|| true);
let has_children = !state.users.is_empty() || !state.children.is_empty();
rsx!(
div {
class: "channel_details",
div {
class: "channel_header",
// Arrow: only toggles open
if has_children {
span {
class: "channel_arrow",
onclick: move |evt| {
evt.stop_propagation();
evt.prevent_default();
let mut w = open.write();
*w = !*w;
},
if *open.read() { "" } else { "" }
}
} else {
span {
class: "channel_arrow channel_arrow--placeholder",
" "
}
}
// Clickable row area (everything except the arrow)
div {
class: "channel_row_click",
ondblclick: move |evt| {
evt.stop_propagation();
evt.prevent_default();
net.send(EnterChannel { channel: id, user })
},
// remove dblclick from the inner span
span {
class: "channel_title",
"{state.name}"
}
// if you add icons/badges later, put them here too
}
}
if *open.read() && has_children {
div {
class: "channel_children",
for id in state.users.iter() {
User { id: *id }
}
for child in state.children.iter() {
Channel { id: *child }
}
}
}
}
)
}
#[cfg(any(feature = "desktop", feature = "web"))]
pub fn pick_and_send_file(net: &Coroutine<Command>) {
let state = use_context::<SharedState>();
let channels = if let Some(user) = state.server.read().this_user() {
vec![user.channel]
} else {
return;
};
let dialog = rfd::AsyncFileDialog::new().pick_file();
let sender = net.tx();
spawn(async move {
let Some(handle) = dialog.await else { return };
let name = handle.file_name();
let bytes = handle.read().await;
let mime = mumble_web2_client::mime_guess::from_path(&name).first();
let _ = sender.unbounded_send(SendFile {
bytes,
name,
mime,
channels,
});
});
}
#[cfg(not(any(feature = "desktop", feature = "web")))]
pub fn pick_and_send_file(net: &Coroutine<Command>) {}
#[component]
pub fn ChatView() -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let state = use_context::<SharedState>();
let server = state.server.read();
let mut draft = use_signal(|| "".to_string());
let mut do_send = move || {
let state = use_context::<SharedState>();
let server = state.server.read();
if let Some(user) = server.this_user() {
net.send(SendChat {
markdown: draft.write().split_off(0),
channels: vec![user.channel],
});
}
};
rsx!(
div {
class: "chat_panel",
div {
class: "chat_history",
for chat in server.chat.iter() {
div {
class: "chat_message",
if let Some(sender) = chat.sender.and_then(|u| server.users.get(&u)) {
UserPill {
name: sender.name.clone(),
icon: UserIcon::None,
isself: false,
}
}
span {
dangerous_inner_html: "{chat.dangerous_html}",
}
}
}
}
div {
class: "chat_box_wrapper",
div {
class: "chat_box",
input {
placeholder: "say something",
value: "{draft.read()}",
oninput: move |evt| draft.set(evt.value().clone()),
onkeypress: move |evt: Event<KeyboardData>| {
if evt.code() == Code::Enter && evt.modifiers().is_empty() {
do_send();
}
}
}
div {
span {
onclick: move |_| pick_and_send_file(&net),
class: "material-symbols-outlined",
style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
"attach_file",
}
}
div {
span {
onclick: move |_| do_send(),
class: "material-symbols-outlined",
style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
"send",
}
}
}
//button {
// onclick: move |_| do_send(),
// "Send"
//}
}
}
)
}
/// Bottom control panel of the server view.
///
/// Shows the connection status (with the current channel name when
/// connected), a disconnect button, the local user's name, and three
/// toggle buttons: denoise, mute, and deafen.
///
/// Renders nothing until the server has reported our own `UserState`.
#[component]
pub fn ControlView(overrides: Resource<ProxyOverrides>) -> Element {
    let net: Coroutine<Command> = use_coroutine_handle();
    let state = use_context::<SharedState>();
    let status = &state.status;
    let server = state.server.read();
    let audio = state.audio.read();
    // Copy out the fields of our own user entry we need below.
    let Some(&UserState {
        deaf,
        self_deaf,
        mute,
        suppress,
        self_mute,
        ref name,
        channel,
        ..
    }) = server.this_user()
    else {
        return rsx!();
    };
    let current_channel_name = server.channels_state.channels[&channel].name.clone();
    // NOTE(review): `proxy_url` is never used below. The read still
    // subscribes this component to `overrides`, so it is kept as-is;
    // confirm whether the subscription is intentional before removing.
    let proxy_url = overrides
        .read_unchecked()
        .as_ref()
        .and_then(|overrides| overrides.proxy_url.clone());
    let connecting_color = "yellow";
    let connected_color = "oklch(0.55 0.1184 141.35)";
    let disconnected_color = "gray";
    let failed_color = "red";
    // Status indicator; only the Connected arm also shows the channel name.
    let connection_status = match &*status.read() {
        Connecting => rsx! {
            div {
                class: "connection_status",
                style: "color: {connecting_color};",
                div {
                    span {
                        class: "material-symbols-outlined",
                        "signal_cellular_alt_2_bar"
                    }
                    span {
                        class: "status_text",
                        " Connecting"
                    }
                }
            }
        },
        Connected => rsx! {
            div {
                class: "connection_status",
                div {
                    style: "color: {connected_color};",
                    span {
                        class: "material-symbols-outlined",
                        "signal_cellular_alt"
                    }
                    span {
                        class: "status_text",
                        " Connected"
                    }
                }
                div {
                    class: "channel_text",
                    span { "{current_channel_name}" }
                }
            }
        },
        Disconnected => rsx! {
            div {
                class: "connection_status",
                style: "color: {disconnected_color};",
                div {
                    span {
                        class: "material-symbols-outlined",
                        "signal_disconnected"
                    }
                    span {
                        class: "status_text",
                        " Disconnected"
                    }
                }
            }
        },
        Failed(_) => rsx! {
            div {
                class: "connection_status",
                style: "color: {failed_color};",
                div {
                    span {
                        class: "material-symbols-outlined",
                        "error"
                    }
                    span {
                        class: "status_text",
                        " Failed"
                    }
                }
            }
        },
    };
    rsx!(
        // Server control
        div {
            class: "button_row",
            div {
                {connection_status}
            }
            span { class: "spacer" }
            button {
                class: "toggle_button",
                onclick: move |_| net.send(Disconnect),
                span {
                    class: "material-symbols-outlined",
                    "signal_disconnected"
                }
            }
        }
        hr { style: "width: 100%;" }
        // User control
        div {
            class: "button_row",
            button {
                class: "user_edit_button",
                span {
                    class: "material-symbols-outlined",
                    style: "color: oklch(0.65 0.2245 28.06);",
                    "person_edit"
                }
            }
            div {
                class: "user_info",
                div {
                    span { class: "user_name", "{name}" }
                }
                div {
                    span { class: "user_data", "some data" }
                }
            }
            span { class: "spacer" }
            // Denoise toggle: flips the setting locally, tells the network
            // task, and persists the choice.
            button {
                class: match audio.denoise {
                    true => "toggle_button is_on",
                    false => "toggle_button",
                },
                role: "switch",
                aria_checked: audio.denoise,
                onclick: move |_| {
                    // NOTE(review): `use_context` is called inside an event
                    // handler here (and again below); confirm this is valid
                    // outside the component body in this Dioxus version.
                    let state = use_context::<SharedState>();
                    let mut audio = state.audio.read().clone();
                    audio.denoise = !audio.denoise;
                    let denoise = audio.denoise;
                    *state.audio.write_unchecked() = audio;
                    net.send(UpdateAudioSettings(AudioSettings { denoise }));
                    let user_config = use_context::<ConfigSystem>();
                    user_config.config_set::<bool>("denoise", &denoise);
                },
                match audio.denoise {
                    true => rsx!(span { class: "material-symbols-outlined", "cadence"}),
                    false => rsx!(span { class: "material-symbols-outlined", "graphic_eq"}),
                }
            }
            // Mute toggle; disabled when muted server-side (we cannot unmute).
            button {
                class: match mute || suppress || self_mute {
                    true => "toggle_button is_on",
                    false => "toggle_button",
                },
                role: "switch",
                aria_checked: mute || suppress || self_mute,
                disabled: mute || suppress,
                onclick: move |_| net.send(SetMute { mute: !self_mute }),
                match mute || suppress || self_mute {
                    true => rsx!(span { class: "material-symbols-outlined", "mic_off"}),
                    false => rsx!(span { class: "material-symbols-outlined", "mic"}),
                }
            }
            // Deafen toggle; disabled when deafened server-side.
            button {
                // Fixed: class was "toggle_button in_on" (typo), so the
                // deafened state never picked up the `is_on` styling used by
                // the other toggle buttons.
                class: match deaf || self_deaf {
                    true => "toggle_button is_on",
                    false => "toggle_button",
                },
                role: "switch",
                aria_checked: deaf || self_deaf,
                disabled: deaf,
                onclick: move |_| net.send(SetDeaf { deaf: !self_deaf }),
                match deaf || self_deaf {
                    true => rsx!(span { class: "material-symbols-outlined", "volume_off"}),
                    false => rsx!(span { class: "material-symbols-outlined", "volume_up"}),
                }
            }
        }
    )
}
/// Main connected layout: channel tree on the left, chat in the middle,
/// and the [`ControlView`] panel.
///
/// Renders nothing until the server has reported our own user entry.
#[component]
pub fn ServerView(overrides: Resource<ProxyOverrides>) -> Element {
    let state = use_context::<SharedState>();
    let server = state.server.read();
    // The destructured `UserState` fields and the coroutine handle that used
    // to be bound here were never read; the lookup is purely a guard for
    // "are we fully connected yet".
    if server.this_user().is_none() {
        return rsx!();
    }
    rsx!(
        div {
            class: "server_grid",
            div {
                class: "server_channel_box",
                // Only root channels are emitted here; `Channel` recurses
                // into children itself.
                for (id, chan) in server.channels_state.channels.iter() {
                    if chan.parent.is_none() {
                        Channel { id: *id }
                    }
                }
            }
            div {
                class: "server_chat_box",
                ChatView {}
            }
            div {
                class: "server_control_box",
                ControlView { overrides }
            }
        }
    )
}
/// Login screen: server address (when the proxy allows arbitrary servers),
/// username, a live server-status line, and the connect button.
///
/// Polls the proxy's status endpoint once a second while visible.
#[component]
pub fn LoginView(overrides: Resource<ProxyOverrides>) -> Element {
    let user_config = use_context::<ConfigSystem>();
    let net: Coroutine<Command> = use_coroutine_handle();
    // `None` until the user types; then we stop falling back to the
    // proxy-provided default below.
    let mut address_input = use_signal(|| user_config.config_get::<String>("server_url"));
    // Effective address: user input if present, otherwise the proxy_url
    // from the loaded overrides (empty string until those resolve).
    let address = use_memo(move || {
        if let Some(addr) = address_input() {
            addr.clone()
        } else {
            overrides()
                .and_then(|c| c.proxy_url.clone())
                .unwrap_or_default()
        }
    });
    let last_status = use_signal(|| None::<color_eyre::Result<ServerStatus>>);
    // Background poller: re-created whenever `address` changes, then pings
    // the status endpoint once per second forever.
    use_resource(move || {
        let addr = address();
        async move {
            let client = reqwest::Client::new();
            loop {
                *last_status.write_unchecked() = Some(Platform::get_status(&client, &addr).await);
                Platform::sleep(std::time::Duration::from_secs_f32(1.0)).await;
            }
        }
    });
    let mut username = use_signal(|| {
        user_config
            .config_get::<String>("username")
            .unwrap_or(String::new())
    });
    // Persists the entered credentials, then asks the network task to
    // connect with the current overrides.
    let do_connect = move |_| {
        let _ = user_config.config_set::<String>("username", &username.read());
        if overrides.read().as_ref().is_some_and(|cfg| cfg.any_server) {
            // NOTE(review): unlike the username write above, this result is
            // not discarded with `let _ =` — confirm whether that is
            // intentional or just inconsistent.
            user_config.config_set::<String>("server_url", &address.read());
        }
        net.send(Connect {
            address: address.read().clone(),
            username: username.read().clone(),
            config: overrides.read().clone().unwrap_or_default(),
        })
    };
    let state = use_context::<SharedState>();
    let status = &state.status;
    // Bottom area: connect button, "connecting" placeholder, or the
    // reconnect button plus the failure message.
    let bottom = match &*status.read() {
        Disconnected => rsx! {
            button {
                class: "login_bttn",
                onclick: do_connect.clone(),
                "Connect"
            }
        },
        Connecting => rsx! {
            div {
                class: "login_bttn",
                "Connecting..."
            }
        },
        Failed(msg) => rsx!(
            button {
                class: "login_bttn",
                onclick: do_connect.clone(),
                "Reconnect"
            }
            div {
                class: "login_error",
                "Failed to connect:"
                pre {
                    "{msg}"
                }
            }
        ),
        // The app root swaps to ServerView when Connected, so this view is
        // never rendered in that state.
        Connected => unreachable!(),
    };
    // Build-time version string injected by CI, if any.
    let version = option_env!("MUMBLE_WEB2_VERSION");
    rsx!(
        div {
            class: "login",
            h1 {
                "Mumble Web"
                match version {
                    Some(v) => rsx!(" " span { class: "login_version", "({v})" }),
                    None => rsx!(),
                }
            }
            // Address entry is only offered when the proxy permits
            // connecting to arbitrary servers.
            if overrides.read().as_ref().is_some_and(|cfg| cfg.any_server) {
                div {
                    label {
                        for: "address-entry",
                        "Server Address:"
                    }
                    input {
                        id: "address-entry",
                        placeholder: "address",
                        value: "{address.read()}",
                        autofocus: "true",
                        oninput: move |evt| address_input.set(Some(evt.value().clone())),
                    }
                }
            }
            div {
                label {
                    for: "username-entry",
                    "Username:"
                    //style: "color: rgba(255, 255, 255, 0.5); font-variation-settings: 'FILL' 1, 'wght' 700, 'GRAD' 0, 'opsz' 48; vertical-align: middle; font-size: 35px; user-select: none;",
                }
                input {
                    id: "username-entry",
                    placeholder: "username",
                    value: "{username.read()}",
                    autofocus: "true",
                    oninput: move |evt| username.set(evt.value().clone()),
                }
            }
            div {
                // Live status line fed by the polling resource above.
                match &*last_status.read() {
                    None => rsx!(div {
                        class: "login_status",
                        span {"···"}
                    }),
                    Some(Ok(ServerStatus { success: false, .. })) => rsx!(div {
                        class: "login_status is_error",
                        span {
                            "Could not reach server"
                        }
                    }),
                    Some(Ok(status)) => rsx!(div {
                        class: "login_status",
                        if let (Some(users), Some(max_users)) = (status.users, status.max_users) {
                            span {"{users}/{max_users} Online"}
                        } else {
                            span {"Unknown Online"}
                        }
                        span {"-"}
                        if let Some((maj, min, pat)) = status.version {
                            span {"Version: {maj}.{min}.{pat}"}
                        } else {
                            span {"Unknown Version"}
                        }
                    }),
                    Some(Err(_)) => rsx!(div {
                        class: "login_status is_error",
                        span {
                            "Could not reach server"
                        }
                    }),
                }
                div {
                    {bottom}
                }
            }
        }
    )
    // rsx!(
    //     div {
    //         class: "{login_box}",
    //         h1 {
    //             "Mumble Web"
    //         }
    //         input {
    //             placeholder: "username",
    //             value: "{username.read()}",
    //             autofocus: "true",
    //             oninput: move |evt| username.set(evt.value().clone()),
    //         }
    //         input {
    //             placeholder: "server address",
    //             value: "{address.read()}",
    //             autofocus: "true",
    //             oninput: move |evt| address_input.set(Some(evt.value().clone())),
    //         }
    //         {bottom}
    //     }
    // )
}
/// Root component: sets up global state, the network coroutine, and the
/// proxy-overrides fetch, then switches between login and server views.
#[component]
pub fn app() -> Element {
    static STYLE: Asset = asset!("/assets/main.scss");
    // Runs once on mount (e.g. microphone permission on mobile targets).
    use_effect(|| {
        Platform::request_permissions();
    });
    // Root contexts so every descendant can `use_context` them.
    let user_config = use_root_context(|| ConfigSystem::new().unwrap());
    let state = use_root_context(|| {
        SharedState::new(State {
            status: Signal::new(Disconnected),
            server: Signal::new(Default::default()),
            audio: Signal::new(AudioSettings {
                // Denoising defaults to on unless the saved config says otherwise.
                denoise: user_config.config_get::<bool>("denoise").unwrap_or(true),
            }),
        })
    });
    // Long-lived networking task; components talk to it via `Command`s.
    let network_state = state.clone();
    use_coroutine(move |rx: UnboundedReceiver<Command>| {
        network_entrypoint(rx, network_state.clone())
    });
    // Proxy-provided configuration; falls back to defaults on any error.
    let overrides = use_resource(|| async move {
        match Platform::load_proxy_overrides().await {
            Ok(overrides) => overrides,
            Err(_) => ProxyOverrides::default(),
        }
    });
    rsx!(
        document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Nunito:ital,wght@0,200..1000;1,200..1000&display=swap" }
        document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20..48,100..700,0..1,-50..200" }
        document::Link{ rel: "stylesheet", href: STYLE }
        // Login screen until connected, then the full server layout.
        match *state.status.read() {
            Connected => rsx!(ServerView { overrides }),
            _ => rsx!(LoginView { overrides }),
        }
    )
}
use mumble_web2_gui::{app, imp};
// NOTE(review): this function appears to contain two merged variants of the
// entry point — it both builds a LaunchBuilder and launches `app`, then
// initializes logging a second time and launches `app::app` again. This
// looks like stripped-diff residue; confirm which variant is the intended
// one before relying on this listing.
pub fn main() {
    Platform::init_logging();
    dioxus::LaunchBuilder::new()
        .with_cfg(desktop! {
            dioxus::desktop::Config::new()
                // Reduce white flash on startup by setting background color and hiding main element
                .with_background_color((0, 0, 0, 255))
                .with_custom_head("<style>html, body { background: black; } #main { visibility: hidden; }</style>".into())
                .with_disable_context_menu(cfg!(not(debug_assertions)))
                .with_window(
                    dioxus::desktop::WindowBuilder::new()
                        .with_title("Mumble Web 2")
                        .with_min_inner_size(dioxus::desktop::LogicalSize::new(600.0, 300.0))
                        .with_inner_size(dioxus::desktop::LogicalSize::new(900.0, 700.0))
                        .with_maximized(false),
                )
        })
        .launch(app);
    imp::init_logging();
    dioxus::launch(app::app);
}
+2 -1
View File
@@ -12,7 +12,7 @@ tokio-rustls = "0.26"
toml = "0.8"
tracing = { version = "^0.1.40", features = ["async-await"] }
tracing-subscriber = { version = "^0.3.18", features = ["env-filter"] }
mumble-web2-common = { workspace = true, features = ["networking"] }
mumble-web2-common = { workspace = true }
salvo = { version = "^0.84.2", features = [
"quinn",
"eyre",
@@ -28,3 +28,4 @@ rcgen = "^0.13.2"
hmac-sha256 = "^1.1.8"
time = "0.3"
url = { version = "2", features = ["serde"] }
rand = "0.9.2"
+97 -23
View File
@@ -1,5 +1,10 @@
use color_eyre::eyre::{anyhow, bail, Context, Result};
use mumble_web2_common::{ping_server, ProxyOverrides, ServerStatus};
use color_eyre::owo_colors::OwoColorize;
use mumble_web2_common::{ClientConfig, ServerStatus};
use once_cell::sync::OnceCell;
use rand::Rng;
use rcgen::date_time_ymd;
use rustls::server;
use salvo::conn::rustls::{Keycert, RustlsConfig};
use salvo::cors::{AllowOrigin, Cors};
use salvo::logging::Logger;
@@ -15,7 +20,7 @@ use tokio::net::TcpStream;
use tokio::pin;
use tokio_rustls::rustls::client::danger::{HandshakeSignatureValid, ServerCertVerifier};
use tokio_rustls::rustls::pki_types::{CertificateDer, ServerName, UnixTime};
use tokio_rustls::rustls::{ClientConfig, DigitallySignedStruct};
use tokio_rustls::rustls::{ClientConfig as RlsClientConfig, DigitallySignedStruct};
use tokio_rustls::{rustls, TlsConnector};
use tracing::info;
use tracing::info_span;
@@ -25,12 +30,15 @@ use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::EnvFilter;
use url::Url;
mod ping;
fn default_cert_alt_names() -> Vec<String> {
vec!["localhost".into()]
}
#[derive(Debug, Deserialize, Serialize)]
struct Config {
public_url: Url,
proxy_url: Option<Url>,
https_listen_address: SocketAddr,
http_listen_address: Option<SocketAddr>,
@@ -74,11 +82,12 @@ async fn main() -> Result<()> {
.install_default()
.map_err(|e| anyhow!("could not install crypto provider {e:?}"))?;
let mut overrides = ProxyOverrides {
let mut client_config = ClientConfig {
proxy_url: match &server_config.proxy_url {
Some(url) => Some(url.to_string()),
None => None,
None => Some(server_config.public_url.join("proxy")?.to_string()),
},
status_url: Some(server_config.public_url.join("status")?.to_string()),
cert_hash: None,
any_server: false,
};
@@ -99,7 +108,7 @@ async fn main() -> Result<()> {
let cert = cert_params.self_signed(&key_pair)?;
let hash = hmac_sha256::Hash::hash(cert.der().as_ref());
overrides.cert_hash = Some(hash.into());
client_config.cert_hash = Some(hash.into());
(cert.pem().into(), key_pair.serialize_pem().into())
}
@@ -119,11 +128,14 @@ async fn main() -> Result<()> {
};
let rustls_config = RustlsConfig::new(Keycert::new().cert(cert.as_slice()).key(key.as_slice()));
info!("proxy overrides:\n{}", toml::to_string_pretty(&overrides)?);
info!(
"client config:\n{}",
toml::to_string_pretty(&client_config)?
);
let config_craft = ConfigCraft {
server_config: server_config.clone(),
overrides,
client_config,
};
let status_craft = StatusCraft {
@@ -133,7 +145,7 @@ async fn main() -> Result<()> {
// Server routing
let mut router = Router::new()
.push(Router::with_path("/proxy").goal(config_craft.connect_proxy()))
.push(Router::with_path("/overrides").get(config_craft.get_overrides()))
.push(Router::with_path("/config").get(config_craft.get_config()))
.push(Router::with_path("/status").get(status_craft.get_status()))
.hoop(Logger::new());
if let Some(gui_path) = server_config.gui_path.clone() {
@@ -176,28 +188,84 @@ pub struct StatusCraft {
impl StatusCraft {
#[craft(handler)]
async fn get_status(&self) -> Json<ServerStatus> {
let addr = self.mumble_server_address;
match ping_server(&addr.ip().to_string(), addr.port()).await {
Ok(status) => Json(status),
let mut server_status = ServerStatus::default();
let ping_packet = ping::PingPacket {
id: rand::rng().random(),
};
let sock = match tokio::net::UdpSocket::bind("0.0.0.0:0").await {
Ok(s) => s,
Err(e) => {
error!("ping failed: {e:#}");
Json(ServerStatus::default())
error!("Could not bind udp socket: {}", e);
return Json(server_status);
}
};
match sock.connect(self.mumble_server_address).await {
Ok(_) => {}
Err(e) => {
error!("Could not send ping packet: {}", e);
return Json(server_status);
}
}
match sock.send(&<[u8; 12]>::from(ping_packet)).await {
Ok(_) => {}
Err(e) => {
error!("Could not send ping packet");
return Json(server_status);
}
}
let mut pong_buf: [u8; 24] = [0; 24];
match tokio::time::timeout(
tokio::time::Duration::from_secs(1),
sock.recv(&mut pong_buf),
)
.await
{
Ok(_) => {}
Err(e) => {
error!("Could not send ping packet");
return Json(server_status);
}
}
let pong_packet = match ping::PongPacket::try_from(pong_buf.as_slice()) {
Ok(p) => p,
Err(e) => {
error!("Could not parse pong packet: {:?}", e);
return Json(server_status);
}
};
server_status.success = true;
server_status.version = Some((
pong_packet.version & 0xFF,
(pong_packet.version >> 8) & 0xFF,
(pong_packet.version >> 16) & 0xFF,
));
server_status.users = Some(pong_packet.users);
server_status.max_users = Some(pong_packet.max_users);
server_status.bandwidth = Some(pong_packet.bandwidth);
Json(server_status)
}
}
#[derive(Clone)]
pub struct ConfigCraft {
server_config: Arc<Config>,
overrides: ProxyOverrides,
client_config: ClientConfig,
}
#[craft]
impl ConfigCraft {
#[craft(handler)]
async fn get_overrides(&self) -> Json<ProxyOverrides> {
Json(self.overrides.clone())
async fn get_config(&self) -> Json<ClientConfig> {
Json(self.client_config.clone())
}
#[craft(handler)]
@@ -258,7 +326,7 @@ async fn connect_proxy_impl(
) -> Result<()> {
info!("connecting to Mumble server...");
let config = ClientConfig::builder()
let config = RlsClientConfig::builder()
.dangerous()
.with_custom_certificate_verifier(Arc::new(NoCertificateVerification))
.with_no_client_auth();
@@ -273,13 +341,19 @@ async fn connect_proxy_impl(
info!("connected to Mumble server");
// Handle transmitting data between the WebTransport client and Mumble TCP Server
// When one direction completes/fails, the other is dropped and its streams are closed
// Spawn tasks to handle transmitting data between the WebTransport client and Mumble TCP Server
let c2s = tokio::spawn(
pass_bytes_loop(incoming, write_server)
.instrument(info_span!("Handler", "Client to server")),
);
let s2c = tokio::spawn(
pass_bytes_loop(read_server, outgoing)
.instrument(info_span!("Handler", "Server to client")),
);
tokio::select! {
res = pass_bytes_loop(incoming, write_server)
.instrument(info_span!("Handler", "Client to server")) => res?,
res = pass_bytes_loop(read_server, outgoing)
.instrument(info_span!("Handler", "Server to client")) => res?,
res = c2s => res??,
res = s2c => res??,
};
Ok(())
}
+141
View File
@@ -0,0 +1,141 @@
// This code was taken from mumble-protocol-2x (https://github.com/dblsaiko/rust-mumble-protocol)
// and originally from mumble-protocol (https://github.com/Johni0702/rust-mumble-protocol)
// These projects are licensed under MIT and Apache 2.0.

//! Ping messages and codec
//!
//! A Mumble client can send periodic UDP [PingPacket]s to servers
//! in order to query their current state and measure latency.
//! A server will usually respond with a corresponding [PongPacket] containing
//! the requested details.
//!
//! Both packets are of fixed size and can be converted to/from `u8` arrays/slices via
//! the respective `From`/`TryFrom` impls.
//!
//! Wire layout (all integers big-endian):
//! - ping (12 bytes): 4 zero bytes, then the 8-byte request id
//! - pong (24 bytes): 4-byte version, 8-byte id, then users, max_users,
//!   bandwidth (4 bytes each)

/// A ping packet sent to the server.
#[derive(Clone, Debug, PartialEq)]
pub struct PingPacket {
    /// Opaque, client-generated id.
    ///
    /// Will be returned by the server unmodified and can be used to correlate
    /// pong replies to ping requests to e.g. calculate latency.
    pub id: u64,
}

/// A pong packet sent to the client in reply to a previously received [PingPacket].
#[derive(Clone, Debug, PartialEq)]
pub struct PongPacket {
    /// Opaque, client-generated id.
    ///
    /// Should match the value in the corresponding [PingPacket].
    pub id: u64,
    /// Server version. E.g. `0x010300` for `1.3.0`.
    pub version: u32,
    /// Current amount of users connected to the server.
    pub users: u32,
    /// Configured limit on the amount of users which can be connected to the server.
    pub max_users: u32,
    /// Maximum bandwidth for server-bound speech per client in bits per second
    pub bandwidth: u32,
}

/// Error during parsing of a [PingPacket].
#[derive(Clone, Debug, PartialEq)]
pub enum ParsePingError {
    /// Ping packets must always be 12 bytes in size.
    InvalidSize,
    /// Ping packets must have an all zero header of 4 bytes.
    InvalidHeader,
}

impl TryFrom<&[u8]> for PingPacket {
    type Error = ParsePingError;

    /// Parses a 12-byte ping: 4 zero header bytes followed by the BE id.
    fn try_from(buf: &[u8]) -> Result<Self, Self::Error> {
        // Fixed-size conversion doubles as the length check.
        let array = <[u8; 12]>::try_from(buf).map_err(|_| ParsePingError::InvalidSize)?;
        if array[0..4] != [0, 0, 0, 0] {
            return Err(ParsePingError::InvalidHeader);
        }
        Ok(Self {
            id: u64::from_be_bytes(array[4..12].try_into().unwrap()),
        })
    }
}

impl From<PingPacket> for [u8; 12] {
    /// Serializes the ping: zeroed 4-byte header, then the BE id.
    fn from(packet: PingPacket) -> Self {
        let mut buf = [0u8; 12];
        buf[4..12].copy_from_slice(&packet.id.to_be_bytes());
        buf
    }
}

/// Error during parsing of a [PongPacket].
#[derive(Clone, Debug, PartialEq)]
pub enum ParsePongError {
    /// Pong packets must always be 24 bytes in size.
    InvalidSize,
}

impl TryFrom<&[u8]> for PongPacket {
    type Error = ParsePongError;

    /// Parses a 24-byte pong; all fields are big-endian.
    fn try_from(buf: &[u8]) -> Result<Self, Self::Error> {
        let array = <[u8; 24]>::try_from(buf).map_err(|_| ParsePongError::InvalidSize)?;
        Ok(Self {
            version: u32::from_be_bytes(array[0..4].try_into().unwrap()),
            id: u64::from_be_bytes(array[4..12].try_into().unwrap()),
            users: u32::from_be_bytes(array[12..16].try_into().unwrap()),
            max_users: u32::from_be_bytes(array[16..20].try_into().unwrap()),
            bandwidth: u32::from_be_bytes(array[20..24].try_into().unwrap()),
        })
    }
}

impl From<PongPacket> for [u8; 24] {
    /// Serializes the pong in wire order; mirrors `TryFrom<&[u8]>` above.
    fn from(packet: PongPacket) -> Self {
        let mut buf = [0u8; 24];
        buf[0..4].copy_from_slice(&packet.version.to_be_bytes());
        buf[4..12].copy_from_slice(&packet.id.to_be_bytes());
        buf[12..16].copy_from_slice(&packet.users.to_be_bytes());
        buf[16..20].copy_from_slice(&packet.max_users.to_be_bytes());
        buf[20..24].copy_from_slice(&packet.bandwidth.to_be_bytes());
        buf
    }
}
-19
View File
@@ -1,19 +0,0 @@
[package]
name = "mumble-web2-tui"
version = "0.1.0"
edition = "2021"
[dependencies]
mumble-web2-client = { version = "0.1.0", path = "../client", features = ["desktop", "embed-denoiser"] }
mumble-web2-common = { version = "0.1.0", path = "../common" }
ratatui = "0.29"
crossterm = { version = "0.28", features = ["event-stream"] }
tokio = { version = "^1.41.1", features = ["rt", "macros"] }
futures-channel = "^0.3.30"
futures = "^0.3.30"
dioxus-signals = "0.7.2"
dioxus-core = "0.7.2"
generational-box = "0.7.2"
color-eyre = "^0.6.3"
tracing-subscriber = { version = "^0.3.18", features = ["env-filter"] }
tracing = "^0.1.40"
-775
View File
@@ -1,775 +0,0 @@
use std::cell::RefCell;
use crossterm::event::{Event, KeyCode, KeyEventKind, KeyModifiers};
use dioxus_core::with_owner;
use futures_channel::mpsc;
use generational_box::Owner;
use mumble_web2_client::{
network_entrypoint, AudioSettings, ChannelId, Command, ConfigSystem,
ConfigSystemInterface as _, ConnectionState, Platform, PlatformInterface as _, ServerState,
UserState,
};
use mumble_web2_common::ProxyOverrides;
use ratatui::{
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, Clear, List, ListItem, Paragraph, Wrap},
Frame,
};
pub struct RefCellReactivity;
impl mumble_web2_client::Reactivity for RefCellReactivity {
type Signal<T> = RefCell<T>;
fn new<T: 'static>(value: T) -> Self::Signal<T> {
RefCell::new(value)
}
fn read<T: 'static>(signal: &Self::Signal<T>) -> impl std::ops::Deref<Target = T> {
signal.borrow()
}
fn write<T: 'static>(signal: &Self::Signal<T>) -> impl std::ops::DerefMut<Target = T> {
signal.borrow_mut()
}
}
pub type State = mumble_web2_client::State<RefCellReactivity>;
pub type SharedState = mumble_web2_client::SharedState<RefCellReactivity>;
// ---------------------------------------------------------------------------
// App state (TUI-local, not shared with client)
// ---------------------------------------------------------------------------
#[derive(Clone, Copy, PartialEq, Eq)]
enum Focus {
Address,
Username,
}
#[derive(Clone, Copy, PartialEq, Eq)]
enum Pane {
Channels,
Chat,
}
struct App {
state: SharedState,
tx: mpsc::UnboundedSender<Command>,
config: ConfigSystem,
overrides: ProxyOverrides,
// Login fields
address: String,
username: String,
login_focus: Focus,
// Server view
active_pane: Pane,
chat_input: String,
chat_focused: bool,
channel_list: Vec<(ChannelId, u16)>, // (id, depth) - flattened tree for navigation
channel_cursor: usize,
should_quit: bool,
}
impl App {
fn new(
state: SharedState,
tx: mpsc::UnboundedSender<Command>,
config: ConfigSystem,
overrides: ProxyOverrides,
) -> Self {
let address = config
.config_get::<String>("server_url")
.or_else(|| overrides.proxy_url.clone())
.unwrap_or_default();
let username = config.config_get::<String>("username").unwrap_or_default();
Self {
state,
tx,
config,
overrides,
address,
username,
login_focus: Focus::Username,
active_pane: Pane::Channels,
chat_input: String::new(),
chat_focused: false,
channel_list: Vec::new(),
channel_cursor: 0,
should_quit: false,
}
}
fn send(&self, cmd: Command) {
let _ = self.tx.unbounded_send(cmd);
}
fn is_connected(&self) -> bool {
matches!(&*self.state.status.borrow(), ConnectionState::Connected)
}
// Build a flat list of (channel_id, depth) by walking the tree.
fn rebuild_channel_list(&mut self) {
self.channel_list.clear();
let server = self.state.server.borrow();
// Find root channels (no parent)
let mut roots: Vec<ChannelId> = server
.channels_state
.channels
.iter()
.filter(|(_, ch)| ch.parent.is_none())
.map(|(&id, _)| id)
.collect();
roots.sort();
for root in roots {
Self::walk_channel(&mut self.channel_list, &server, root, 0);
}
}
fn walk_channel(
list: &mut Vec<(ChannelId, u16)>,
server: &ServerState,
id: ChannelId,
depth: u16,
) {
list.push((id, depth));
let Some(ch) = server.channels_state.channels.get(&id) else {
return;
};
for &child in ch.children.iter() {
Self::walk_channel(list, server, child, depth + 1);
}
}
}
// ---------------------------------------------------------------------------
// User icon helpers
// ---------------------------------------------------------------------------
fn user_indicator(user: &UserState) -> &'static str {
if user.deaf || user.self_deaf {
"D"
} else if user.mute || user.self_mute {
"M"
} else if user.suppress {
"S"
} else {
" "
}
}
fn user_style(user: &UserState) -> Style {
if user.deaf || user.self_deaf {
Style::default().fg(Color::Red)
} else if user.mute || user.self_mute || user.suppress {
Style::default().fg(Color::Yellow)
} else {
Style::default().fg(Color::Green)
}
}
// ---------------------------------------------------------------------------
// Rendering
// ---------------------------------------------------------------------------
fn draw(frame: &mut Frame, app: &mut App) {
if app.is_connected() {
draw_server(frame, app);
} else {
draw_login(frame, app);
}
}
fn draw_login(frame: &mut Frame, app: &App) {
let area = frame.area();
// Center a box
let vert = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Min(0),
Constraint::Length(10),
Constraint::Min(0),
])
.split(area);
let horiz = Layout::default()
.direction(Direction::Horizontal)
.constraints([
Constraint::Min(0),
Constraint::Length(50),
Constraint::Min(0),
])
.split(vert[1]);
let box_area = horiz[1];
let block = Block::default()
.title(" Mumble Web 2 ")
.borders(Borders::ALL);
let inner = block.inner(box_area);
frame.render_widget(Clear, box_area);
frame.render_widget(block, box_area);
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Length(1), // address label
Constraint::Length(1), // address input
Constraint::Length(1), // spacer
Constraint::Length(1), // username label
Constraint::Length(1), // username input
Constraint::Length(1), // spacer
Constraint::Length(1), // status / button hint
Constraint::Min(0),
])
.split(inner);
let status = &*app.state.status.borrow();
// Address
if app.overrides.any_server {
let label_style = if app.login_focus == Focus::Address {
Style::default()
.fg(Color::Cyan)
.add_modifier(Modifier::BOLD)
} else {
Style::default()
};
frame.render_widget(
Paragraph::new("Server Address:").style(label_style),
chunks[0],
);
let input_style = if app.login_focus == Focus::Address {
Style::default().fg(Color::White)
} else {
Style::default().fg(Color::DarkGray)
};
frame.render_widget(
Paragraph::new(format!("> {}", app.address)).style(input_style),
chunks[1],
);
}
// Username
let label_style = if app.login_focus == Focus::Username {
Style::default()
.fg(Color::Cyan)
.add_modifier(Modifier::BOLD)
} else {
Style::default()
};
frame.render_widget(Paragraph::new("Username:").style(label_style), chunks[3]);
let input_style = if app.login_focus == Focus::Username {
Style::default().fg(Color::White)
} else {
Style::default().fg(Color::DarkGray)
};
frame.render_widget(
Paragraph::new(format!("> {}", app.username)).style(input_style),
chunks[4],
);
// Status line
let status_line = match status {
ConnectionState::Disconnected => Line::from(Span::styled(
"[Enter] Connect",
Style::default().fg(Color::Green),
)),
ConnectionState::Connecting => Line::from(Span::styled(
"Connecting...",
Style::default().fg(Color::Yellow),
)),
ConnectionState::Failed(msg) => Line::from(vec![
Span::styled("Failed: ", Style::default().fg(Color::Red)),
Span::raw(msg.clone()),
Span::styled(" [Enter] Retry", Style::default().fg(Color::Green)),
]),
ConnectionState::Connected => unreachable!(),
};
frame.render_widget(Paragraph::new(status_line), chunks[6]);
}
fn draw_server(frame: &mut Frame, app: &mut App) {
app.rebuild_channel_list();
let server = app.state.server.borrow();
let audio = app.state.audio.borrow();
// Main layout: channels left, chat right, controls bottom
let vert = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(0), Constraint::Length(3)])
.split(frame.area());
let horiz = Layout::default()
.direction(Direction::Horizontal)
.constraints([Constraint::Percentage(35), Constraint::Percentage(65)])
.split(vert[0]);
// --- Channel tree ---
let chan_block = Block::default()
.title(" Channels ")
.borders(Borders::ALL)
.border_style(if app.active_pane == Pane::Channels && !app.chat_focused {
Style::default().fg(Color::Cyan)
} else {
Style::default()
});
let mut items: Vec<ListItem> = Vec::new();
for (i, &(ch_id, depth)) in app.channel_list.iter().enumerate() {
let Some(ch) = server.channels_state.channels.get(&ch_id) else {
continue;
};
let indent = " ".repeat(depth as usize);
let marker = if ch.children.is_empty() { " " } else { "" };
let is_selected = i == app.channel_cursor;
let style = if is_selected {
Style::default()
.fg(Color::Cyan)
.add_modifier(Modifier::BOLD)
} else {
Style::default().fg(Color::White)
};
let prefix = if is_selected { ">" } else { " " };
// Channel name line
let mut lines = vec![Line::from(Span::styled(
format!("{prefix}{indent}{marker}{}", ch.name),
style,
))];
// Users in this channel
for &uid in ch.users.iter() {
if let Some(user) = server.users.get(&uid) {
let is_self = server.session == Some(uid);
let ind = user_indicator(user);
let u_style = if is_self {
user_style(user).add_modifier(Modifier::UNDERLINED)
} else {
user_style(user)
};
lines.push(Line::from(Span::styled(
format!(" {indent} [{ind}] {}", user.name),
u_style,
)));
}
}
items.push(ListItem::new(lines));
}
let channel_list = List::new(items).block(chan_block);
frame.render_widget(channel_list, horiz[0]);
// --- Chat panel ---
let chat_area = horiz[1];
let chat_layout = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(0), Constraint::Length(3)])
.split(chat_area);
let chat_block = Block::default()
.title(" Chat ")
.borders(Borders::ALL)
.border_style(if app.active_pane == Pane::Chat && !app.chat_focused {
Style::default().fg(Color::Cyan)
} else {
Style::default()
});
let chat_lines: Vec<Line> = server
.chat
.iter()
.map(|msg| {
let sender = msg
.sender
.and_then(|uid| server.users.get(&uid))
.map(|u| u.name.as_str())
.unwrap_or("server");
Line::from(vec![
Span::styled(
format!("{sender}: "),
Style::default()
.fg(Color::Cyan)
.add_modifier(Modifier::BOLD),
),
Span::raw(&msg.raw),
])
})
.collect();
// Show last N lines that fit
let chat_inner_height = chat_block.inner(chat_layout[0]).height as usize;
let skip = chat_lines.len().saturating_sub(chat_inner_height);
let visible_lines: Vec<Line> = chat_lines.into_iter().skip(skip).collect();
let chat_widget = Paragraph::new(visible_lines)
.block(chat_block)
.wrap(Wrap { trim: false });
frame.render_widget(chat_widget, chat_layout[0]);
// Chat input
let input_block = Block::default()
.title(if app.chat_focused {
" Input (Esc to cancel) "
} else {
" [t] to type "
})
.borders(Borders::ALL)
.border_style(if app.chat_focused {
Style::default().fg(Color::Green)
} else {
Style::default()
});
let input_widget = Paragraph::new(app.chat_input.as_str()).block(input_block);
frame.render_widget(input_widget, chat_layout[1]);
// --- Controls bar ---
let this_user = server.this_user();
let (self_mute, mute, suppress, self_deaf, deaf) = this_user
.map(|u| (u.self_mute, u.mute, u.suppress, u.self_deaf, u.deaf))
.unwrap_or_default();
let muted = mute || suppress || self_mute;
let deafened = deaf || self_deaf;
let status_text = match &*app.state.status.borrow() {
ConnectionState::Connected => "Connected",
ConnectionState::Connecting => "Connecting",
ConnectionState::Disconnected => "Disconnected",
ConnectionState::Failed(_) => "Failed",
};
let current_channel = this_user
.and_then(|u| server.channels_state.channels.get(&u.channel))
.map(|ch| ch.name.as_str())
.unwrap_or("?");
let controls = Line::from(vec![
Span::styled(
format!(" {status_text} "),
Style::default().fg(Color::Green),
),
Span::styled(
format!("#{current_channel} "),
Style::default().fg(Color::White),
),
Span::raw(""),
Span::styled(
if muted { "[m]ute ✓ " } else { "[m]ute " },
if muted {
Style::default().fg(Color::Yellow)
} else {
Style::default()
},
),
Span::styled(
if deafened { "[d]eaf ✓ " } else { "[d]eaf " },
if deafened {
Style::default().fg(Color::Red)
} else {
Style::default()
},
),
Span::styled(
if audio.denoise {
"[n]oise ✓ "
} else {
"[n]oise "
},
if audio.denoise {
Style::default().fg(Color::Cyan)
} else {
Style::default()
},
),
Span::raw(""),
Span::styled("[q]uit", Style::default().fg(Color::DarkGray)),
]);
let controls_block = Block::default().borders(Borders::ALL);
let controls_widget = Paragraph::new(controls).block(controls_block);
frame.render_widget(controls_widget, vert[1]);
}
// ---------------------------------------------------------------------------
// Event handling
// ---------------------------------------------------------------------------
/// Handle a key press on the login screen.
///
/// Tab/BackTab cycle focus between the address and username fields (the
/// address field is only reachable when `overrides.any_server` permits
/// choosing a server), Enter attempts to connect, Esc quits, and printable
/// keys / Backspace edit the focused field.
fn handle_login_key(app: &mut App, code: KeyCode) {
    match code {
        KeyCode::Tab | KeyCode::BackTab => {
            app.login_focus = match app.login_focus {
                Focus::Address => Focus::Username,
                // When the server address is fixed by overrides, the
                // username field is the only editable one, so focus stays.
                Focus::Username if app.overrides.any_server => Focus::Address,
                Focus::Username => Focus::Username,
            };
        }
        KeyCode::Enter => {
            // Evaluate the connection state as a standalone expression so
            // the RefCell `Ref` is dropped before we touch config or send
            // the command below. The previous `let status = &*…borrow();`
            // kept the borrow alive (temporary lifetime extension) across
            // those calls, risking a double-borrow panic if any of them
            // re-borrows `status` mutably.
            let can_connect = matches!(
                &*app.state.status.borrow(),
                ConnectionState::Disconnected | ConnectionState::Failed(_)
            );
            if can_connect {
                // Persist the entered credentials for the next launch.
                app.config.config_set::<String>("username", &app.username);
                if app.overrides.any_server {
                    app.config.config_set::<String>("server_url", &app.address);
                }
                app.send(Command::Connect {
                    address: app.address.clone(),
                    username: app.username.clone(),
                    config: app.overrides.clone(),
                });
            }
        }
        KeyCode::Char(c) => {
            // Append the typed character to whichever field has focus.
            let field = match app.login_focus {
                Focus::Address => &mut app.address,
                Focus::Username => &mut app.username,
            };
            field.push(c);
        }
        KeyCode::Backspace => {
            let field = match app.login_focus {
                Focus::Address => &mut app.address,
                Focus::Username => &mut app.username,
            };
            field.pop();
        }
        KeyCode::Esc => {
            app.should_quit = true;
        }
        _ => {}
    }
}
/// Handle a key press while connected to a server.
///
/// Two modes: when `app.chat_focused` is set, keys edit and submit the
/// chat input line; otherwise single-key shortcuts control mute / deafen /
/// denoise, pane focus, and channel-list navigation.
fn handle_server_key(app: &mut App, code: KeyCode) {
    if app.chat_focused {
        match code {
            KeyCode::Esc => {
                // Leave chat-entry mode; any typed text is kept.
                app.chat_focused = false;
            }
            KeyCode::Enter => {
                if !app.chat_input.is_empty() {
                    let server = app.state.server.borrow();
                    if let Some(user) = server.this_user() {
                        // Target the channel the local user currently occupies.
                        let channels = vec![user.channel];
                        // take() empties the input line and yields its contents.
                        let markdown = std::mem::take(&mut app.chat_input);
                        // Release the RefCell borrow before sending the command.
                        drop(server);
                        app.send(Command::SendChat { markdown, channels });
                    }
                }
            }
            KeyCode::Char(c) => {
                app.chat_input.push(c);
            }
            KeyCode::Backspace => {
                app.chat_input.pop();
            }
            _ => {}
        }
        return;
    }
    match code {
        KeyCode::Char('q') => {
            app.send(Command::Disconnect);
            app.should_quit = true;
        }
        KeyCode::Char('m') => {
            let server = app.state.server.borrow();
            if let Some(user) = server.this_user() {
                // Self-mute can only be toggled while not server-muted
                // or suppressed.
                if !user.mute && !user.suppress {
                    let new_mute = !user.self_mute;
                    // Release the RefCell borrow before sending the command.
                    drop(server);
                    app.send(Command::SetMute { mute: new_mute });
                }
            }
        }
        KeyCode::Char('d') => {
            let server = app.state.server.borrow();
            if let Some(user) = server.this_user() {
                // Server-deafened users cannot toggle self-deafen.
                if !user.deaf {
                    let new_deaf = !user.self_deaf;
                    drop(server);
                    app.send(Command::SetDeaf { deaf: new_deaf });
                }
            }
        }
        KeyCode::Char('n') => {
            // Toggle noise suppression: update shared state, notify the
            // audio/network side, and persist the choice in config.
            let audio = app.state.audio.borrow().clone();
            let new_denoise = !audio.denoise;
            *app.state.audio.borrow_mut() = AudioSettings {
                denoise: new_denoise,
            };
            app.send(Command::UpdateAudioSettings(AudioSettings {
                denoise: new_denoise,
            }));
            app.config.config_set::<bool>("denoise", &new_denoise);
        }
        KeyCode::Char('t') => {
            // Move keyboard focus into the chat input line.
            app.chat_focused = true;
        }
        KeyCode::Tab => {
            // Toggle which pane (channel list / chat) is active.
            app.active_pane = match app.active_pane {
                Pane::Channels => Pane::Chat,
                Pane::Chat => Pane::Channels,
            };
        }
        KeyCode::Char('j') | KeyCode::Down => {
            // Move the channel cursor down, clamped to the last entry.
            if !app.channel_list.is_empty() {
                app.channel_cursor = (app.channel_cursor + 1).min(app.channel_list.len() - 1);
            }
        }
        KeyCode::Char('k') | KeyCode::Up => {
            app.channel_cursor = app.channel_cursor.saturating_sub(1);
        }
        KeyCode::Enter => {
            // Join the channel under the cursor.
            if let Some(&(ch_id, _)) = app.channel_list.get(app.channel_cursor) {
                let server = app.state.server.borrow();
                if let Some(uid) = server.session {
                    drop(server);
                    app.send(Command::EnterChannel {
                        channel: ch_id,
                        user: uid,
                    });
                }
            }
        }
        _ => {}
    }
}
/// Top-level input dispatcher: route a terminal event to the login-screen
/// or server-screen key handler, after filtering to key-press events.
fn handle_event(app: &mut App, ev: Event) {
    // Only key presses are of interest; releases/repeats and non-keyboard
    // events (mouse, resize, …) are ignored.
    let key = match ev {
        Event::Key(k) if k.kind == KeyEventKind::Press => k,
        _ => return,
    };
    // Ctrl-C always quits
    let is_ctrl_c =
        key.modifiers.contains(KeyModifiers::CONTROL) && key.code == KeyCode::Char('c');
    if is_ctrl_c {
        app.send(Command::Disconnect);
        app.should_quit = true;
    } else if app.is_connected() {
        handle_server_key(app, key.code);
    } else {
        handle_login_key(app, key.code);
    }
}
// ---------------------------------------------------------------------------
// Main
// ---------------------------------------------------------------------------
/// Route all `tracing` output to a log file, keeping the terminal free for
/// the TUI.
///
/// The destination is `$MUMBLE_TUI_LOG` when set, otherwise
/// `<tmpdir>/mumble-tui.log`. The default level is INFO, overridable via
/// the standard env-filter directives.
fn init_file_logging() -> color_eyre::Result<()> {
    use tracing::level_filters::LevelFilter;
    use tracing_subscriber::filter::EnvFilter;
    let log_path = match std::env::var("MUMBLE_TUI_LOG") {
        Ok(path) => path,
        Err(_) => std::env::temp_dir()
            .join("mumble-tui.log")
            .to_string_lossy()
            .into_owned(),
    };
    // Append so repeated runs accumulate in one file.
    let file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)?;
    let env_filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::INFO.into())
        .from_env_lossy();
    tracing_subscriber::fmt()
        .with_env_filter(env_filter)
        .with_writer(file)
        // Plain text in the log file: no ANSI colour codes.
        .with_ansi(false)
        .with_target(true)
        .with_level(true)
        .init();
    eprintln!("logging to {log_path}");
    Ok(())
}
fn main() -> color_eyre::Result<()> {
color_eyre::install()?;
init_file_logging()?;
// Use a single-threaded runtime since dioxus Signals are !Send.
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()?;
let local = tokio::task::LocalSet::new();
local.block_on(&rt, async {
let config = ConfigSystem::new()?;
let overrides = Platform::load_proxy_overrides().await.unwrap_or_default();
let state = SharedState::new(State {
status: RefCell::new(ConnectionState::Disconnected),
server: RefCell::new(Default::default()),
audio: RefCell::new(AudioSettings {
denoise: config.config_get::<bool>("denoise").unwrap_or(true),
}),
});
let (tx, rx) = mpsc::unbounded::<Command>();
// Spawn the network loop on the local task set (not Send-bound).
let net_state = state.clone();
tokio::task::spawn_local(async move {
network_entrypoint(rx, net_state).await;
});
// Setup terminal
crossterm::terminal::enable_raw_mode()?;
let mut stdout = std::io::stdout();
crossterm::execute!(
stdout,
crossterm::terminal::EnterAlternateScreen,
crossterm::event::EnableMouseCapture
)?;
let backend = ratatui::backend::CrosstermBackend::new(stdout);
let mut terminal = ratatui::Terminal::new(backend)?;
let mut app = App::new(state, tx, config, overrides);
// Event loop
loop {
terminal.draw(|f| draw(f, &mut app))?;
if app.should_quit {
break;
}
// Poll with a short timeout so we re-render when state changes.
// Yield to the tokio runtime between polls so network tasks can progress.
if crossterm::event::poll(std::time::Duration::from_millis(16))? {
let ev = crossterm::event::read()?;
handle_event(&mut app, ev);
}
tokio::task::yield_now().await;
}
// Restore terminal
crossterm::terminal::disable_raw_mode()?;
crossterm::execute!(
terminal.backend_mut(),
crossterm::terminal::LeaveAlternateScreen,
crossterm::event::DisableMouseCapture
)?;
terminal.show_cursor()?;
Ok(())
})
}