4 Commits

Author SHA1 Message Date
restitux cd90cb628b ci: correct windows dockerfile path
Build Mumble Web 2 / linux_build (push) Successful in 1m41s
Build Mumble Web 2 / windows_build (push) Successful in 2m31s
Build Mumble Web 2 release builder containers / windows-release-builder-container-build (push) Successful in 17s
2025-12-05 14:24:43 -07:00
restitux 37613a65c4 web: fix audio on firefox
Build Mumble Web 2 / windows_build (push) Successful in 2m39s
Build Mumble Web 2 / linux_build (push) Successful in 4m57s
2025-12-05 01:54:12 -07:00
sam d6b482528f Load status from relative url (#5)
Build Mumble Web 2 / linux_build (push) Successful in 2m23s
Build Mumble Web 2 / windows_build (push) Successful in 2m33s
Remove public_url config option
Use proxy_url instead for example configs
Get status from relative endpoint, like /config
Show version on login page

Reviewed-on: #5
Co-authored-by: Sam Sartor <me@samsartor.com>
Co-committed-by: Sam Sartor <me@samsartor.com>
2025-12-05 07:00:38 +00:00
sam 5df7b0e082 Fix audio on windows (#3)
Build Mumble Web 2 / linux_build (push) Successful in 1m42s
Build Mumble Web 2 / windows_build (push) Successful in 2m43s
Reviewed-on: #3

Automatically choose supported profile
Play stream once created

Co-authored-by: Sam Sartor <me@samsartor.com>
Co-committed-by: Sam Sartor <me@samsartor.com>
2025-12-05 05:34:07 +00:00
14 changed files with 253 additions and 130 deletions
@@ -22,6 +22,6 @@ jobs:
- name: Build Windows image
shell: bash
run: |
docker pull "$(grep -m1 '^FROM' Dockerfile | awk '{print $2}')"
docker pull "$(grep -m1 '^FROM' ./docker/windows-release-builder.Dockerfile | awk '{print $2}')"
docker build -t git.ohea.xyz/mumble/mumble-web2/windows-release-builder:latest -f ./docker/windows-release-builder.Dockerfile .
docker push git.ohea.xyz/mumble/mumble-web2/windows-release-builder:latest
-1
View File
@@ -3,7 +3,6 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct ClientConfig {
pub proxy_url: Option<String>,
pub status_url: Option<String>,
pub cert_hash: Option<Vec<u8>>,
pub any_server: bool,
}
+1 -1
View File
@@ -1,4 +1,4 @@
public_url = "https://127.0.0.1:4433"
proxy_url = "https://127.0.0.1:4433/proxy"
https_listen_address = "127.0.0.1:4433"
http_listen_address = "127.0.0.1:8080"
mumble_server_url = "[SERVER_URL_HERE]"
-1
View File
@@ -1,4 +1,3 @@
public_url = "https://localhost:64444"
proxy_url = "https://127.0.0.1:4433/proxy"
https_listen_address = "127.0.0.1:4433"
http_listen_address = "127.0.0.1:4400"
+6 -4
View File
@@ -30,8 +30,6 @@ web-sys = { version = "^0.3.72", features = [
"EncodedAudioChunkInit",
"EncodedAudioChunkType",
"CodecState",
"MediaStreamTrackGenerator",
"MediaStreamTrackGeneratorInit",
"AudioContext",
"AudioContextOptions",
"MediaStream",
@@ -42,6 +40,7 @@ web-sys = { version = "^0.3.72", features = [
"AudioWorkletNode",
"AudioWorklet",
"AudioWorkletProcessor",
"MessagePort",
"MediaStreamConstraints",
"WorkletOptions",
"AudioEncoder",
@@ -88,7 +87,7 @@ tracing = "^0.1.40"
color-eyre = "^0.6.3"
crossbeam-queue = "^0.3.11"
lol_html = "^2.2.0"
rfd = { git = "https://github.com/samsartor/rfd.git", version = "^0.16.0", default-features = false }
rfd = { git = "https://github.com/samsartor/rfd.git", version = "^0.16.0", default-features = false }
base64 = "^0.22"
mime_guess = "^2.0.5"
async_cell = "^0.2.3"
@@ -97,7 +96,9 @@ dioxus-asset-resolver = "0.7.1"
# Denoising
# =========
deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = ["tract"] }
deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = [
"tract",
] }
crossbeam = "0.8.4"
[patch.crates-io]
@@ -131,3 +132,4 @@ desktop = [
"rfd/xdg-portal",
"rfd/tokio",
]
+5
View File
@@ -279,6 +279,11 @@ a:visited {
color: #b3c6b4;
}
&_version {
color: var(--txt-color);
font-weight: normal;
}
&_bttn {
font-weight: bold;
font-size: large;
@@ -1,7 +1,7 @@
const SAMPLE_RATE = 48000;
const PACKET_SAMPLES = 960;
class RustWorklet extends AudioWorkletProcessor {
class RustMicWorklet extends AudioWorkletProcessor {
constructor(options) {
super();
this.module = options.processorOptions;
@@ -31,7 +31,7 @@ class RustWorklet extends AudioWorkletProcessor {
}
this.buffer_offset -= PACKET_SAMPLES;
this.timestamp = null;
}
}
process(inputs) {
//console.log(inputs);
@@ -60,4 +60,44 @@ class RustWorklet extends AudioWorkletProcessor {
}
};
registerProcessor("rust_mic_worklet", RustWorklet);
// Plays back decoded PCM pushed from the main thread.
// Messages posted to this worklet's port carry { samples: Float32Array }
// (see the sink_port.post_message call in the wasm glue); chunks are
// queued and drained one sample per output frame.
class RustSpeakerWorklet extends AudioWorkletProcessor {
    constructor() {
        super();
        // FIFO of pending { samples } messages and the read cursor into
        // the chunk at the head of the queue.
        this.queue = [];
        this.readIndex = 0;
        this.port.onmessage = (event) => {
            this.queue.push(event.data);
        };
    }
    process(inputs, outputs) {
        // NOTE: the per-call console.log that was here ran on every
        // realtime audio callback and has been removed.
        const output = outputs[0];
        for (let i = 0; i < output[0].length; i++) {
            if (!this.queue.length) {
                // Underrun: leave the rest of the (zero-filled) buffer silent
                // and keep the processor alive for the next callback.
                return true;
            }
            const current = this.queue[0];
            // Duplicate the mono sample into every output channel.
            for (let ch = 0; ch < output.length; ch++) {
                output[ch][i] = current.samples[this.readIndex];
            }
            this.readIndex++;
            if (this.readIndex >= current.samples.length) {
                this.queue.shift();
                this.readIndex = 0;
            }
        }
        return true;
    }
}
registerProcessor("rust_mic_worklet", RustMicWorklet);
registerProcessor("rust_speaker_worklet", RustSpeakerWorklet);
+56 -8
View File
@@ -1,7 +1,39 @@
use std::env;
use std::path::Path;
use std::process::Command;
fn main() {
/// Stamps the build with a version string derived from git, exposed to the
/// crate as the `MUMBLE_WEB2_VERSION` compile-time env var (read via
/// `option_env!`). Returns `None` when git metadata is unavailable; the
/// caller treats this as best-effort.
fn version_env() -> Option<()> {
    // Re-run the build script whenever the external override changes.
    println!("cargo::rerun-if-env-changed=MUMBLE_WEB2_VERSION");
    if env::var("MUMBLE_WEB2_VERSION").is_ok() {
        // Externally provided version wins; rustc sees the same env var.
        return Some(());
    }
    let output = Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .output()
        .ok()?;
    if !output.status.success() {
        // Not a git checkout (e.g. building from a source tarball) —
        // without this check a failed rev-parse produced a bogus "git-" tag.
        return None;
    }
    let git_hash = String::from_utf8(output.stdout).ok()?;
    let git_hash = git_hash.trim(); // drop trailing newline
    let status = Command::new("git")
        .args(["status", "--porcelain"])
        .output()
        .ok()?;
    // Any porcelain output means the working tree differs from HEAD.
    let dirty = if status.status.success() && !status.stdout.is_empty() {
        "-dirty"
    } else {
        ""
    };
    // Expose it as a compile-time env var
    println!("cargo::rustc-env=MUMBLE_WEB2_VERSION=git-{git_hash}{dirty}");
    // Rebuild when HEAD changes (branch switch / checkout). NOTE(review):
    // commits to the current branch move .git/refs/heads/<branch>, not
    // .git/HEAD, so same-branch commits do not trigger a rebuild.
    println!("cargo::rerun-if-changed=.git/HEAD");
    Some(())
}
fn download_deepfilternet() {
// Define the target directory and file
let assets_dir = "assets";
let target_file = format!("{}/DeepFilterNet3_ll_onnx.tar.gz", assets_dir);
@@ -9,30 +41,46 @@ fn main() {
// Check if the file already exists
if target_path.exists() {
println!("cargo:warning=DeepFilterNet model already exists at {}", target_file);
println!(
"cargo::warning=DeepFilterNet model already exists at {}",
target_file
);
return;
}
println!("cargo:warning=Downloading DeepFilterNet model to {}...", target_file);
println!(
"cargo::warning=Downloading DeepFilterNet model to {}...",
target_file
);
// Download the file using curl
let url = "https://github.com/Rikorose/DeepFilterNet/raw/refs/heads/main/models/DeepFilterNet3_ll_onnx.tar.gz";
let status = Command::new("curl")
.args([
"-L", // Follow redirects
"-o", &target_file, // Output file
"-L", // Follow redirects
"-o",
&target_file, // Output file
url,
])
.status()
.expect("Failed to execute curl command. Make sure curl is installed.");
if !status.success() {
panic!("Failed to download DeepFilterNet model from {}", url);
println!("cargo::error=Failed to download DeepFilterNet model from {url}");
return;
}
println!("cargo:warning=Successfully downloaded DeepFilterNet model to {}", target_file);
println!(
"cargo::warning=Successfully downloaded DeepFilterNet model to {}",
target_file
);
// Rerun this build script if the target file is deleted
println!("cargo:rerun-if-changed={}", target_file);
println!("cargo::rerun-if-changed={}", target_file);
}
// Build-script entry point: stamp the crate version, then fetch the
// DeepFilterNet model archive if it is not already present.
fn main() {
    // Best-effort: returns None when git metadata is unavailable.
    version_env();
    download_deepfilternet();
}
+6 -19
View File
@@ -545,33 +545,15 @@ pub fn ServerView(config: Resource<ClientConfig>) -> Element {
)
}
async fn get_status(
client: &reqwest::Client,
status_url: &str,
) -> color_eyre::Result<ServerStatus> {
Ok(client
.get(status_url)
.send()
.await?
.json::<ServerStatus>()
.await?)
}
#[component]
pub fn LoginView(config: Resource<ClientConfig>) -> Element {
let net: Coroutine<Command> = use_coroutine_handle();
let last_status = use_signal(|| None::<color_eyre::Result<ServerStatus>>);
use_resource(move || async move {
let Some(config) = config.read().clone() else {
return;
};
let Some(status_url) = config.status_url else {
return;
};
let client = reqwest::Client::new();
loop {
*last_status.write_unchecked() = Some(get_status(&client, &status_url).await);
*last_status.write_unchecked() = Some(imp::get_status(&client).await);
imp::sleep(std::time::Duration::from_secs_f32(1.0)).await;
}
});
@@ -630,11 +612,16 @@ pub fn LoginView(config: Resource<ClientConfig>) -> Element {
),
Connected => unreachable!(),
};
let version = option_env!("MUMBLE_WEB2_VERSION");
rsx!(
div {
class: "login",
h1 {
"Mumble Web"
match version {
Some(v) => rsx!(" " span { class: "login_version", "({v})" }),
None => rsx!(),
}
}
if config.read().as_ref().is_some_and(|cfg| cfg.any_server) {
div {
+3 -3
View File
@@ -97,13 +97,13 @@ impl AudioProcessor {
}
impl AudioProcessor {
pub fn process(&mut self, audio: &[f32], output: &mut Vec<f32>) {
pub fn process(&mut self, audio: &[f32], channels: usize, output: &mut Vec<f32>) {
let mut include_raw = true;
if self.denoise {
with_denoising_model(&self.spawn, |df| {
include_raw = false;
self.buffer.extend_from_slice(audio);
self.buffer.extend(audio.iter().step_by(channels).copied());
output.reserve(audio.len());
let hop = df.hop_size;
@@ -130,7 +130,7 @@ impl AudioProcessor {
}
if include_raw {
output.extend_from_slice(audio);
output.extend(audio.iter().step_by(channels).copied());
}
}
}
+62 -25
View File
@@ -1,11 +1,11 @@
use crate::app::Command;
use crate::effects::{AudioProcessor, AudioProcessorSender};
use color_eyre::eyre::{eyre, Context, Error};
use cpal::traits::{DeviceTrait, HostTrait};
use color_eyre::eyre::{bail, eyre, Context, Error};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
use dioxus::hooks::UnboundedReceiver;
use futures::io::{AsyncRead, AsyncWrite};
use mumble_protocol::control::ClientControlCodec;
use mumble_web2_common::ClientConfig;
use mumble_web2_common::{ClientConfig, ServerStatus};
use std::mem::replace;
use std::net::ToSocketAddrs;
use std::sync::Arc;
@@ -43,7 +43,7 @@ const PACKET_SAMPLES: u32 = 960;
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl AudioSystem {
pub fn new() -> Result<Self, Error> {
pub async fn new() -> Result<Self, Error> {
// TODO
let host = cpal::default_host();
let name = host.id();
@@ -64,10 +64,46 @@ impl AudioSystem {
self.processors.store(Some(processor))
}
/// Picks a stream config for the given device capabilities: fixes the
/// sample rate to `SAMPLE_RATE`, requires i16 samples, clamps the buffer
/// to ~480 frames (10 ms), and prefers the fewest channels, then the
/// smallest buffer. Errors when no candidate matches.
fn choose_config(
    &self,
    configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>,
) -> Result<cpal::StreamConfig, Error> {
    // Ranking key: fewest channels first, then smallest (lowest-latency)
    // buffer. A single-pass min replaces the previous collect + sort.
    let rank = |cfg: &cpal::StreamConfig| {
        let cpal::BufferSize::Fixed(buf) = cfg.buffer_size else {
            // Every candidate below is constructed with BufferSize::Fixed.
            unreachable!()
        };
        (cfg.channels, buf)
    };
    configs
        .filter_map(|cfg| cfg.try_with_sample_rate(cpal::SampleRate(SAMPLE_RATE)))
        .filter(|cfg| cfg.sample_format() == cpal::SampleFormat::I16)
        .map(|cfg| cpal::StreamConfig {
            buffer_size: cpal::BufferSize::Fixed(match *cfg.buffer_size() {
                cpal::SupportedBufferSize::Range { min, max } => 480.clamp(min, max),
                cpal::SupportedBufferSize::Unknown => 480,
            }),
            ..cfg.config()
        })
        // min_by_key returns the FIRST minimal element, matching the old
        // stable sort + get(0); the error is now built lazily.
        .min_by_key(rank)
        .ok_or_else(|| eyre!("no supported stream configs"))
}
pub fn start_recording(
&mut self,
mut each: impl FnMut(Vec<u8>) + Send + 'static,
) -> Result<(), Error> {
let config = self.choose_config(self.input.supported_input_configs()?)?;
info!(
"creating recording on {:?} with {:#?}",
self.input.name()?,
config
);
let mut encoder =
opus::Encoder::new(SAMPLE_RATE, opus::Channels::Mono, opus::Application::Voip)?;
let mut current_processor = AudioProcessor::new_plain();
@@ -78,7 +114,7 @@ impl AudioSystem {
if let Some(new_processor) = processors.take() {
current_processor = new_processor;
}
current_processor.process(frame, &mut output_buffer);
current_processor.process(frame, config.channels as usize, &mut output_buffer);
if output_buffer.len() < PACKET_SAMPLES as usize {
return;
}
@@ -94,17 +130,12 @@ impl AudioSystem {
}
};
match self.input.build_input_stream(
&cpal::StreamConfig {
channels: 1,
sample_rate: cpal::SampleRate(SAMPLE_RATE),
buffer_size: cpal::BufferSize::Fixed(PACKET_SAMPLES),
},
data_callback,
error_callback,
None,
) {
match self
.input
.build_input_stream(&config, data_callback, error_callback, None)
{
Ok(stream) => {
stream.play()?;
self.recording_stream = Some(stream);
Ok(())
}
@@ -116,6 +147,12 @@ impl AudioSystem {
}
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
let config = self.choose_config(self.input.supported_input_configs()?)?;
info!(
"creating player on {:?} with {:#?}",
self.output.name().ok(),
&config
);
let buffer = Arc::new(Mutex::new(dasp_ring_buffer::Bounded::from_raw_parts(
0,
0,
@@ -128,20 +165,16 @@ impl AudioSystem {
let stream = {
let buffer = buffer.clone();
self.output.build_output_stream(
&cpal::StreamConfig {
channels: 1,
sample_rate: cpal::SampleRate(SAMPLE_RATE),
buffer_size: cpal::BufferSize::Fixed(480), // 10ms playback delay
},
move |frame, info| {
&config,
move |frame, _info| {
let mut buffer = buffer.lock().unwrap();
for x in frame.iter_mut() {
for x in frame.chunks_mut(config.channels as usize) {
match buffer.pop() {
Some(y) => {
*x = y;
x.fill(y);
}
None => {
*x = 0;
x.fill(0);
}
}
}
@@ -150,6 +183,7 @@ impl AudioSystem {
None,
)?
};
stream.play()?;
Ok(AudioPlayer {
decoder,
stream,
@@ -291,12 +325,15 @@ pub fn load_username() -> Option<String> {
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
status_url: None,
cert_hash: None,
any_server: true,
})
}
/// Desktop stub for the status poll: there is no status endpoint on the
/// desktop build yet, so this always fails; the login view surfaces the
/// error. The client is unused (underscored to silence the warning) but
/// kept so the signature matches the web implementation.
pub async fn get_status(_client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
    bail!("status not supported on desktop yet")
}
pub fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
+68 -56
View File
@@ -6,7 +6,7 @@ use futures::{AsyncRead, AsyncWrite};
use gloo_timers::future::TimeoutFuture;
use js_sys::Float32Array;
use mumble_protocol::control::ClientControlCodec;
use mumble_web2_common::ClientConfig;
use mumble_web2_common::{ClientConfig, ServerStatus};
use reqwest::Url;
use std::future::Future;
use std::time::Duration;
@@ -15,7 +15,6 @@ use tracing::{debug, error, info, instrument};
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture;
use web_sys::js_sys::{Promise, Reflect, Uint8Array};
use web_sys::AudioContext;
use web_sys::AudioContextOptions;
use web_sys::AudioData;
use web_sys::AudioDecoder;
@@ -30,14 +29,13 @@ use web_sys::EncodedAudioChunkInit;
use web_sys::EncodedAudioChunkType;
use web_sys::MediaStream;
use web_sys::MediaStreamConstraints;
use web_sys::MediaStreamTrackGenerator;
use web_sys::MediaStreamTrackGeneratorInit;
use web_sys::MessageEvent;
use web_sys::WebTransport;
use web_sys::WebTransportBidirectionalStream;
use web_sys::WebTransportOptions;
use web_sys::WorkletOptions;
use web_sys::{console, window};
use web_sys::{AudioContext, AudioDataCopyToOptions};
pub use wasm_bindgen_futures::spawn_local as spawn;
@@ -78,12 +76,41 @@ pub struct AudioSystem {
processors: AudioProcessorSender,
}
// Registers the bundled audio worklet module (mic + speaker processors)
// on the given AudioContext. Must complete before any AudioWorkletNode
// referencing "rust_mic_worklet" / "rust_speaker_worklet" is constructed.
async fn attach_worklet(audio_context: &AudioContext) -> Result<(), Error> {
    // Create worklets to process mic and speaker audio
    // Speaker audio processing worklet only required on
    // browsers that don't support MediaStreamTrackGenerator
    let options = WorkletOptions::new();
    // Hand the compiled wasm module to the worklet thread via
    // processorOptions; the mic worklet stores it in its constructor.
    Reflect::set(
        &options,
        &"processorOptions".into(),
        &wasm_bindgen::module(),
    )
    .ey()?;
    // asset! resolves the bundled worklet script to its served URL.
    let module = asset!("assets/rust_audio_worklet.js").to_string();
    info!("loading mic worklet from {module:?}");
    // add_module_with_options yields a JS Promise; await it so the
    // processors are registered before the function returns.
    audio_context
        .audio_worklet()
        .ey()?
        .add_module_with_options(&module, &options)
        .ey()?
        .into_future()
        .await
        .ey()?;
    Ok(())
}
impl AudioSystem {
pub fn new() -> Result<Self, Error> {
pub async fn new() -> Result<Self, Error> {
// Create MediaStreams to playback decoded audio
// The audio context is used to reproduce audio.
let webctx = configure_audio_context();
attach_worklet(&webctx).await?;
let processors = AudioProcessorSender::default();
Ok(AudioSystem { webctx, processors })
}
@@ -104,18 +131,10 @@ impl AudioSystem {
}
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
let audio_stream_generator =
MediaStreamTrackGenerator::new(&MediaStreamTrackGeneratorInit::new("audio")).ey()?;
let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;
// Create MediaStream from MediaStreamTrackGenerator
let js_tracks = web_sys::js_sys::Array::new();
js_tracks.push(&audio_stream_generator);
let media_stream = MediaStream::new_with_tracks(&js_tracks).ey()?;
// Create MediaStreamAudioSourceNode
let audio_source = self.webctx.create_media_stream_source(&media_stream).ey()?;
// Connect output of audio_source to audio_context (browser audio)
audio_source
// Connect worklet to destination
sink_node
.connect_with_audio_node(&self.webctx.destination())
.ey()?;
@@ -124,28 +143,31 @@ impl AudioSystem {
error!("error decoding audio {:?}", e);
}) as Box<dyn FnMut(JsValue)>);
// This knows what MediaStreamTrackGenerator to use as it closes around it
let sink_port = sink_node.port().ey()?;
let output = Closure::wrap(Box::new(move |audio_data: AudioData| {
let writable = audio_stream_generator.writable();
if writable.locked() {
return;
}
if let Err(e) = writable.get_writer().map(|writer| {
spawn(async move {
if let Err(e) = JsFuture::from(writer.ready()).await.ey() {
error!("write chunk ready error {:?}", e);
}
if let Err(e) = JsFuture::from(writer.write_with_chunk(&audio_data))
.await
.ey()
{
error!("write chunk error {:?}", e);
};
writer.release_lock();
});
}) {
error!("error writing audio data {:?}", e);
// Extract planar PCM from AudioData into an ArrayBuffer or Float32Array
// Here we assume f32 samples, 1 channel for brevity.
let number_of_frames = audio_data.number_of_frames();
let js_buffer = Float32Array::new_with_length(number_of_frames);
let audio_data_copy_to_options = &AudioDataCopyToOptions::new(0);
audio_data_copy_to_options.set_format(web_sys::AudioSampleFormat::F32);
if let Err(e) = audio_data
.copy_to_with_buffer_source(&js_buffer.buffer(), &audio_data_copy_to_options)
{
error!("could not copy audio data to array {:?}", e);
}
// Post to the worklet; include sampleRate and channel count if needed.
let msg = js_sys::Object::new();
js_sys::Reflect::set(&msg, &"samples".into(), &js_buffer).unwrap();
sink_port.post_message(&msg).unwrap();
audio_data.close();
}) as Box<dyn FnMut(AudioData)>);
let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new(
@@ -209,7 +231,7 @@ fn process_audio(frame: &JsValue, processor: &mut AudioProcessor) {
};
let input = samples.to_vec();
let mut output = Vec::with_capacity(input.len());
processor.process(&input, &mut output);
processor.process(&input, 1, &mut output);
samples.copy_from(&output);
}
@@ -234,25 +256,6 @@ async fn run_encoder_worklet(
.map_err(|e| JsError::new(&format!("not a stream: {e:?}")))
.ey()?;
let options = WorkletOptions::new();
Reflect::set(
&options,
&"processorOptions".into(),
&wasm_bindgen::module(),
)
.ey()?;
let module = asset!("assets/rust_mic_worklet.js").to_string();
info!("loading mic worklet from {module:?}");
audio_context
.audio_worklet()
.ey()?
.add_module_with_options(&module, &options)
.ey()?
.into_future()
.await
.ey()?;
let source = audio_context.create_media_stream_source(&stream).ey()?;
let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?;
@@ -426,6 +429,15 @@ pub async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(config)
}
/// Fetches the server status from the relative `status` endpoint and
/// deserializes the JSON body into a [`ServerStatus`].
pub async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
    // Resolve "status" against the page's base URL, then GET + decode.
    let endpoint = absolute_url("status")?;
    let response = client.get(endpoint).send().await?;
    let status = response.json::<ServerStatus>().await?;
    Ok(status)
}
pub fn init_logging() {
// copied from tracing_web example usage
+1 -1
View File
@@ -113,7 +113,7 @@ pub async fn network_loop<R: imp::ImpRead, W: imp::ImpWrite>(
});
}
let mut audio = imp::AudioSystem::new()?;
let mut audio = imp::AudioSystem::new().await?;
{
let send_chan = send_chan.clone();
let mut sequence_num = 0;
+1 -7
View File
@@ -1,10 +1,6 @@
use color_eyre::eyre::{anyhow, bail, Context, Result};
use color_eyre::owo_colors::OwoColorize;
use mumble_web2_common::{ClientConfig, ServerStatus};
use once_cell::sync::OnceCell;
use rand::Rng;
use rcgen::date_time_ymd;
use rustls::server;
use salvo::conn::rustls::{Keycert, RustlsConfig};
use salvo::cors::{AllowOrigin, Cors};
use salvo::logging::Logger;
@@ -38,7 +34,6 @@ fn default_cert_alt_names() -> Vec<String> {
#[derive(Debug, Deserialize, Serialize)]
struct Config {
public_url: Url,
proxy_url: Option<Url>,
https_listen_address: SocketAddr,
http_listen_address: Option<SocketAddr>,
@@ -85,9 +80,8 @@ async fn main() -> Result<()> {
let mut client_config = ClientConfig {
proxy_url: match &server_config.proxy_url {
Some(url) => Some(url.to_string()),
None => Some(server_config.public_url.join("proxy")?.to_string()),
None => None,
},
status_url: Some(server_config.public_url.join("status")?.to_string()),
cert_hash: None,
any_server: false,
};