load an actual denoising model
This commit is contained in:
@@ -0,0 +1,38 @@
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
/// Build script: ensures the DeepFilterNet model archive is present in `assets/`,
/// downloading it with `curl` on first build.
fn main() {
    // Define the target directory and file.
    let assets_dir = "assets";
    let target_file = format!("{}/DeepFilterNet3_ll_onnx.tar.gz", assets_dir);
    let target_path = Path::new(&target_file);

    // Check if the file already exists; if so there is nothing to do.
    if target_path.exists() {
        println!("cargo:warning=DeepFilterNet model already exists at {}", target_file);
        return;
    }

    println!("cargo:warning=Downloading DeepFilterNet model to {}...", target_file);

    // `curl -o` does not create missing directories, so make sure `assets/` exists.
    std::fs::create_dir_all(assets_dir)
        .expect("failed to create assets directory for the DeepFilterNet model");

    // Download to a temporary name first: an interrupted download must not leave
    // a partial archive behind that would satisfy the exists() check above on
    // the next build.
    let url = "https://github.com/Rikorose/DeepFilterNet/raw/refs/heads/main/models/DeepFilterNet3_ll_onnx.tar.gz";
    let tmp_file = format!("{}.part", target_file);

    let status = Command::new("curl")
        .args([
            "-L", // Follow redirects
            "-f", // Fail on HTTP errors instead of saving the error page as the model
            "-o", &tmp_file, // Output file
            url,
        ])
        .status()
        .expect("Failed to execute curl command. Make sure curl is installed.");

    if !status.success() {
        // Best-effort cleanup of the partial file; a failed removal is not fatal.
        let _ = std::fs::remove_file(&tmp_file);
        panic!("Failed to download DeepFilterNet model from {}", url);
    }

    // Atomically (on most filesystems) move the finished download into place.
    std::fs::rename(&tmp_file, target_path)
        .expect("failed to move downloaded DeepFilterNet model into place");

    println!("cargo:warning=Successfully downloaded DeepFilterNet model to {}", target_file);

    // Rerun this build script if the target file is deleted.
    println!("cargo:rerun-if-changed={}", target_file);
}
|
||||
+101
-9
@@ -1,25 +1,117 @@
|
||||
use crossbeam::atomic::AtomicCell;
|
||||
use df::tract::{mut_slice_as_arrayviewmut, slice_as_arrayview};
|
||||
use df::tract::{DfParams, DfTract, RuntimeParams};
|
||||
use dioxus::prelude::{asset, manganis, Asset};
|
||||
use std::cell::RefCell;
|
||||
use std::sync::Arc;
|
||||
use tracing::{error, info};
|
||||
|
||||
use crate::imp;
|
||||
|
||||
// Bundled archive containing the DeepFilterNet model (fetched by build.rs,
// resolved at runtime via imp::read_asset_bytes).
static DF_MODEL: Asset = asset!("/assets/DeepFilterNet3_ll_onnx.tar.gz");
|
||||
|
||||
/// Lifecycle of the thread-local denoising engine used by `with_denoising_model`.
enum DenoisingModelState {
    /// Denoising has never been requested on this thread; no download started.
    Nothing,
    /// A download/parse task is in flight; the spawned task fills the cell with
    /// the model parameters once they are ready.
    Downloading(Arc<AtomicCell<Option<DfParams>>>),
    /// Engine constructed and ready to process audio.
    /// (NOTE(review): "Availible" is a typo for "Available"; renaming would
    /// touch every use site, so it is left as-is here.)
    Availible(Box<DfTract>),
}
|
||||
|
||||
/// Runs `func` against the thread-local denoising engine if it is ready.
///
/// On the first call per thread this kicks off an async task (via `spawn`) that
/// loads and parses the model asset; until the engine is constructed, every call
/// returns `None` so the caller can fall back to passing audio through unchanged.
fn with_denoising_model<O>(
    spawn: &imp::SpawnHandle,
    func: impl FnOnce(&mut DfTract) -> O,
) -> Option<O> {
    // Using a thread local is super gross, but DfTract is not Send (so it can never leave the current
    // thread) while AudioProcessing itself might change threads whenever.
    thread_local! {
        static STATE: RefCell<DenoisingModelState> = const { RefCell::new(DenoisingModelState::Nothing) };
    }

    STATE.with_borrow_mut(|state| match state {
        DenoisingModelState::Nothing => {
            // First request on this thread: hand one end of the cell to the
            // download task and keep the other in the state machine.
            let cell = Arc::new(AtomicCell::new(None));
            let cell_task = cell.clone();
            *state = DenoisingModelState::Downloading(cell);
            spawn.spawn(async move {
                let model_bytes = match imp::read_asset_bytes(&DF_MODEL).await {
                    Ok(b) => b,
                    Err(e) => {
                        // NOTE(review): on failure the state stays `Downloading`
                        // forever and the cell is never filled, so denoising is
                        // silently disabled for this thread — confirm intended.
                        error!("could not read denoising model from \"{DF_MODEL}\": {e:?}");
                        return;
                    }
                };
                let params = match DfParams::from_bytes(&model_bytes) {
                    Ok(p) => p,
                    Err(e) => {
                        error!("could not load denoising model parameters: {e:?}");
                        return;
                    }
                };
                // Publish the parsed parameters for the audio thread to pick up.
                cell_task.store(Some(params));
            });
            None
        }
        DenoisingModelState::Downloading(cell) => {
            // Poll the cell; `take` consumes the params so the engine is
            // constructed exactly once.
            if let Some(params) = cell.take() {
                // Mono (single-channel) runtime configuration.
                let mut tract = match DfTract::new(params, &RuntimeParams::default_with_ch(1)) {
                    Ok(t) => Box::new(t),
                    Err(e) => {
                        error!("could not create denoising engine: {e:?}");
                        return None;
                    }
                };
                info!("instantiated denoising engine");
                // Run the caller's closure before stashing the engine in state.
                let out = func(&mut tract);
                *state = DenoisingModelState::Availible(tract);
                Some(out)
            } else {
                // Still downloading/parsing; caller should pass audio through.
                None
            }
        }
        DenoisingModelState::Availible(tract) => Some(func(tract)),
    })
}
|
||||
|
||||
/// Per-stream audio processor; optionally runs the DeepFilterNet denoiser.
// NOTE(review): `#[derive(Default)]` requires every field type (including
// `imp::SpawnHandle`) to implement `Default` — confirm this holds on all
// platforms, since constructors `new_plain`/`new_denoising` are used elsewhere.
#[derive(Default)]
pub struct AudioProcessor {
    // Pre-existing DeepFilter state; when set, `process` uses it directly
    // instead of the downloaded tract model. Presumably a legacy path —
    // TODO confirm.
    df: Option<::df::DFState>,
    // Whether `process` should route audio through the denoising engine.
    denoise: bool,
    // Handle used to spawn the async model-download task.
    spawn: imp::SpawnHandle,
}
|
||||
|
||||
impl AudioProcessor {
|
||||
pub fn new_plain() -> Self {
|
||||
AudioProcessor {
|
||||
denoise: false,
|
||||
spawn: imp::SpawnHandle::current(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_denoising() -> Self {
|
||||
let df = ::df::DFState::default();
|
||||
AudioProcessor { df: Some(df) }
|
||||
AudioProcessor {
|
||||
denoise: true,
|
||||
spawn: imp::SpawnHandle::current(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioProcessor {
|
||||
pub fn process(&mut self, audio: &[f32], output: &mut Vec<f32>) {
|
||||
if let Some(df) = &mut self.df {
|
||||
let start = output.len();
|
||||
output.extend(std::iter::repeat_n(0f32, audio.len()));
|
||||
df.process_frame(audio, &mut output[start..]);
|
||||
} else {
|
||||
let mut finished = false;
|
||||
if self.denoise {
|
||||
with_denoising_model(&self.spawn, |df| {
|
||||
let start = output.len();
|
||||
output.extend(std::iter::repeat_n(0f32, audio.len()));
|
||||
finished = true;
|
||||
let output = &mut output[start..];
|
||||
df.process(
|
||||
slice_as_arrayview(audio, &[audio.len()])
|
||||
.into_shape((1, audio.len()))
|
||||
.unwrap(),
|
||||
mut_slice_as_arrayviewmut(output, &[output.len()])
|
||||
.into_shape((1, output.len()))
|
||||
.unwrap(),
|
||||
);
|
||||
});
|
||||
}
|
||||
if !finished {
|
||||
output.extend_from_slice(audio);
|
||||
}
|
||||
}
|
||||
|
||||
+17
-7
@@ -1,16 +1,15 @@
|
||||
use crate::app::Command;
|
||||
use crate::effects::{AudioProcessor, AudioProcessorSender};
|
||||
use color_eyre::eyre::{eyre, Error};
|
||||
use color_eyre::eyre::{eyre, Context, Error};
|
||||
use cpal::traits::{DeviceTrait, HostTrait};
|
||||
use dioxus::hooks::{UnboundedReceiver, UnboundedSender};
|
||||
use dioxus::hooks::UnboundedReceiver;
|
||||
use futures::io::{AsyncRead, AsyncWrite};
|
||||
use mumble_protocol::control::{ClientControlCodec, ControlPacket};
|
||||
use mumble_protocol::Serverbound;
|
||||
use mumble_protocol::control::ClientControlCodec;
|
||||
use mumble_web2_common::ClientConfig;
|
||||
use std::mem::replace;
|
||||
use std::net::ToSocketAddrs;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use std::{fmt, io, sync::Arc};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_rustls::rustls;
|
||||
use tokio_rustls::rustls::client::danger::{HandshakeSignatureValid, ServerCertVerifier};
|
||||
@@ -19,8 +18,9 @@ use tokio_rustls::rustls::ClientConfig as RlsClientConfig;
|
||||
use tokio_rustls::rustls::DigitallySignedStruct;
|
||||
use tokio_rustls::TlsConnector;
|
||||
use tokio_util::compat::{TokioAsyncReadCompatExt as _, TokioAsyncWriteCompatExt as _};
|
||||
use tracing::{debug, error, info, instrument, warn};
|
||||
use tracing::{error, info, instrument, warn};
|
||||
|
||||
pub use tokio::runtime::Handle as SpawnHandle;
|
||||
pub use tokio::task::spawn;
|
||||
pub use tokio::time::sleep;
|
||||
|
||||
@@ -70,7 +70,7 @@ impl AudioSystem {
|
||||
) -> Result<(), Error> {
|
||||
let mut encoder =
|
||||
opus::Encoder::new(SAMPLE_RATE, opus::Channels::Mono, opus::Application::Voip)?;
|
||||
let mut current_processor = AudioProcessor::default();
|
||||
let mut current_processor = AudioProcessor::new_plain();
|
||||
let mut output_buffer = Vec::new();
|
||||
let processors = self.processors.clone();
|
||||
let error_callback = move |e: cpal::StreamError| error!("error recording: {e:?}");
|
||||
@@ -311,3 +311,13 @@ pub fn init_logging() {
|
||||
.with_env_filter(env_filter)
|
||||
.init();
|
||||
}
|
||||
|
||||
// TODO: once we update to dioxus 0.7, swap this out with the dioxus-asset-resolver crate
|
||||
pub async fn read_asset_bytes(asset: &dioxus::prelude::Asset) -> color_eyre::Result<Vec<u8>> {
|
||||
let cur_exe = std::env::current_exe().unwrap();
|
||||
let path = cur_exe
|
||||
.parent()
|
||||
.unwrap()
|
||||
.join(asset.to_string().trim_matches('/'));
|
||||
Ok(std::fs::read(&path).with_context(|| format!("native path \"{}\"", path.display()))?)
|
||||
}
|
||||
|
||||
+24
-1
@@ -8,6 +8,7 @@ use js_sys::Float32Array;
|
||||
use mumble_protocol::control::ClientControlCodec;
|
||||
use mumble_web2_common::ClientConfig;
|
||||
use reqwest::Url;
|
||||
use std::future::Future;
|
||||
use std::time::Duration;
|
||||
use tracing::level_filters::LevelFilter;
|
||||
use tracing::{debug, error, info, instrument};
|
||||
@@ -283,7 +284,7 @@ async fn run_encoder_worklet(
|
||||
audio_encoder.configure(&encoder_config);
|
||||
info!("created audio encoder");
|
||||
|
||||
let mut current_processor = AudioProcessor::default();
|
||||
let mut current_processor = AudioProcessor::new_plain();
|
||||
let onmessage: Closure<dyn FnMut(MessageEvent)> = Closure::new(move |event: MessageEvent| {
|
||||
if let Some(new_processor) = processors.take() {
|
||||
current_processor = new_processor;
|
||||
@@ -444,3 +445,25 @@ pub fn init_logging() {
|
||||
|
||||
info!("logging initiated");
|
||||
}
|
||||
|
||||
// TODO: once we update to dioxus 0.7, swap this out with the dioxus-asset-resolver crate
|
||||
pub async fn read_asset_bytes(asset: &dioxus::prelude::Asset) -> color_eyre::Result<Vec<u8>> {
|
||||
let path = asset.to_string();
|
||||
let path = path.trim_matches('/');
|
||||
Ok(reqwest::get(path).await?.bytes().await?.to_vec())
|
||||
}
|
||||
|
||||
pub struct SpawnHandle;
|
||||
|
||||
impl SpawnHandle {
|
||||
pub fn current() -> Self {
|
||||
SpawnHandle
|
||||
}
|
||||
|
||||
pub fn spawn<F>(&self, future: F)
|
||||
where
|
||||
F: Future<Output = ()> + 'static,
|
||||
{
|
||||
spawn(future);
|
||||
}
|
||||
}
|
||||
|
||||
+1
-1
@@ -289,7 +289,7 @@ fn accept_command(
|
||||
if denoise {
|
||||
audio.set_processor(AudioProcessor::new_denoising());
|
||||
} else {
|
||||
audio.set_processor(AudioProcessor::default());
|
||||
audio.set_processor(AudioProcessor::new_plain());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user