diff --git a/crates/components/connections/devices/ndi/src/lib.rs b/crates/components/connections/devices/ndi/src/lib.rs
index 271e6ebf0..9a832cbff 100644
--- a/crates/components/connections/devices/ndi/src/lib.rs
+++ b/crates/components/connections/devices/ndi/src/lib.rs
@@ -136,7 +136,10 @@ impl NdiSource {
 
     pub fn frame(&mut self) -> anyhow::Result> {
         if self.recv.get_no_connections() == 0 {
-            tracing::warn!("Connected to no sources, trying to reconnect to {}", self.source.get_name());
+            tracing::warn!(
+                "Connected to no sources, trying to reconnect to {}",
+                self.source.get_name()
+            );
             self.recv.connect(&self.source);
         }
         tracing::trace!("Waiting for frame");
@@ -144,9 +147,7 @@ impl NdiSource {
         const TIMEOUT: u32 = 1000;
 
         match self.recv.capture_video(&mut video_data, TIMEOUT) {
-            FrameType::None => {
-                Ok(None)
-            }
+            FrameType::None => Ok(None),
             FrameType::Video => {
                 if let Some(video_data) = video_data {
                     tracing::debug!("Received video frame: {video_data:?}");
diff --git a/crates/runtime/pipeline/nodes/video/ndi/src/input.rs b/crates/runtime/pipeline/nodes/video/ndi/src/input.rs
index 586247c33..b4a4fdc3e 100644
--- a/crates/runtime/pipeline/nodes/video/ndi/src/input.rs
+++ b/crates/runtime/pipeline/nodes/video/ndi/src/input.rs
@@ -166,7 +166,7 @@ impl NdiInputState {
             decode_handle,
         })
     }
-    
+
     fn check_background_decoder(&mut self) -> anyhow::Result<()> {
         if !self.decode_handle.is_alive() {
             tracing::warn!("Background decoder thread died, restarting");
@@ -174,7 +174,7 @@ impl NdiInputState {
             let metadata = self.decode_handle.decode(self.ndi_source_ref.clone())?;
             self.texture = BackgroundDecoderTexture::new(metadata);
         }
-    
+
         Ok(())
     }
 
@@ -183,7 +183,10 @@ impl NdiInputState {
     }
 
     fn change_source(&mut self, ndi_source_ref: NdiSourceRef) -> anyhow::Result<()> {
-        tracing::trace!("Changing ndi source from {:?} to {ndi_source_ref:?}", self.ndi_source_ref);
+        tracing::trace!(
+            "Changing ndi source from {:?} to {ndi_source_ref:?}",
+            self.ndi_source_ref
+        );
         let metadata = self.decode_handle.decode(ndi_source_ref.clone())?;
         self.texture = BackgroundDecoderTexture::new(metadata);
         self.ndi_source_ref = ndi_source_ref;
diff --git a/crates/runtime/pipeline/nodes/video/src/background_thread_decoder.rs b/crates/runtime/pipeline/nodes/video/src/background_thread_decoder.rs
index 9efc561ef..43e6b4178 100644
--- a/crates/runtime/pipeline/nodes/video/src/background_thread_decoder.rs
+++ b/crates/runtime/pipeline/nodes/video/src/background_thread_decoder.rs
@@ -1,8 +1,8 @@
-use std::borrow::Cow;
 use flume::{bounded, unbounded};
-use ringbuffer::{AllocRingBuffer, RingBuffer};
-use mizer_wgpu::TextureProvider;
 use mizer_wgpu::wgpu::TextureFormat;
+use mizer_wgpu::TextureProvider;
+use ringbuffer::{AllocRingBuffer, RingBuffer};
+use std::borrow::Cow;
 
 #[derive(Debug, Clone, Copy)]
 pub struct VideoMetadata {
@@ -33,8 +33,11 @@ impl BackgroundDecoderThreadHandle {
     pub fn is_alive(&self) -> bool {
         !self.sender.is_disconnected()
     }
-    
-    pub fn decode(&mut self, args: TDecoder::CreateDecoder) -> anyhow::Result> {
+
+    pub fn decode(
+        &mut self,
+        args: TDecoder::CreateDecoder,
+    ) -> anyhow::Result> {
         tracing::trace!("BackgroundDecoderThreadHandle::decode");
         let (message_tx, message_rx) = bounded(5);
         self.sender
@@ -55,11 +58,10 @@ impl BackgroundDecoderThreadHandle {
 
     pub fn send(&mut self, command: TDecoder::Commands) -> anyhow::Result<()> {
         let (message_tx, message_rx) = bounded(5);
-        self.sender
-            .send(BackgroundDecoderThreadMessage::Command(
-                command,
-                Some(message_tx),
-            ))?;
+        self.sender.send(BackgroundDecoderThreadMessage::Command(
+            command,
+            Some(message_tx),
+        ))?;
 
         self.receiver = message_rx;
         Ok(())
@@ -72,7 +74,11 @@ impl BackgroundDecoderThreadHandle {
 
 impl Drop for BackgroundDecoderThreadHandle {
     fn drop(&mut self) {
-        if self.sender.send(BackgroundDecoderThreadMessage::Exit).is_err() {
+        if self
+            .sender
+            .send(BackgroundDecoderThreadMessage::Exit)
+            .is_err()
+        {
             tracing::debug!("Error sending exit message, thread seems to be shut down already");
         }
     }
@@ -195,7 +201,9 @@ impl BackgroundDecoderThread {
             match decoder.decode() {
                 Ok(Some(frame)) => {
                     if let Err(err) = self.sender.send(VideoThreadEvent::DecodedFrame(frame)) {
-                        tracing::error!("Closing decoder thread. Error sending decoded frame: {err:?}");
+                        tracing::error!(
+                            "Closing decoder thread. Error sending decoded frame: {err:?}"
+                        );
                         return;
                     }
                 }
@@ -225,7 +233,10 @@ impl BackgroundDecoderTexture {
         self.metadata.is_some()
     }
 
-    pub fn receive_frames(&mut self, handle: &mut BackgroundDecoderThreadHandle) {
+    pub fn receive_frames(
+        &mut self,
+        handle: &mut BackgroundDecoderThreadHandle,
+    ) {
         profiling::scope!("BackgroundThreadTexture::receive_frames");
         for event in handle.receiver.drain() {
             match event {
diff --git a/crates/runtime/pipeline/nodes/video/src/lib.rs b/crates/runtime/pipeline/nodes/video/src/lib.rs
index fc683d3f5..7fef3b9dc 100644
--- a/crates/runtime/pipeline/nodes/video/src/lib.rs
+++ b/crates/runtime/pipeline/nodes/video/src/lib.rs
@@ -32,4 +32,4 @@ mod static_color;
 mod transform;
 mod video_file;
 
-pub mod background_thread_decoder;
\ No newline at end of file
+pub mod background_thread_decoder;
diff --git a/crates/runtime/pipeline/nodes/video/src/video_file/texture.rs b/crates/runtime/pipeline/nodes/video/src/video_file/texture.rs
index 6956e15d5..5881879e7 100644
--- a/crates/runtime/pipeline/nodes/video/src/video_file/texture.rs
+++ b/crates/runtime/pipeline/nodes/video/src/video_file/texture.rs
@@ -64,14 +64,14 @@ impl VideoTexture {
         columns[0].label("Texture Size");
         if let Some(ref metadata) = self.metadata {
             columns[1].label(format!("{}x{}", metadata.width, metadata.height));
-        }else {
+        } else {
             columns[1].label("N/A");
         }
 
         columns[0].label("Video FPS");
         if let Some(ref metadata) = self.metadata {
             columns[1].label(format!("{}", metadata.fps));
-        }else {
+        } else {
             columns[1].label("N/A");
         }