From 30705e84d65a276711a1beb2b20ed3cb593dc8e4 Mon Sep 17 00:00:00 2001
From: Richie McIlroy <33632126+richiemcilroy@users.noreply.github.com>
Date: Wed, 29 Oct 2025 14:01:46 -0700
Subject: [PATCH 1/2] fix: synchronize audio pipeline stop with video completion

---
 apps/desktop/src-tauri/src/recording.rs      |  6 +--
 crates/recording/src/output_pipeline/core.rs | 15 ++++++-
 crates/recording/src/sources/audio_mixer.rs  | 43 +++-----------------
 crates/recording/src/studio_recording.rs     | 22 ++++++++++
 4 files changed, 44 insertions(+), 42 deletions(-)

diff --git a/apps/desktop/src-tauri/src/recording.rs b/apps/desktop/src-tauri/src/recording.rs
index e6d4f79478..10eab98b01 100644
--- a/apps/desktop/src-tauri/src/recording.rs
+++ b/apps/desktop/src-tauri/src/recording.rs
@@ -594,9 +594,9 @@ pub async fn start_recording(
     }
     .await;
 
-    let actor_done_fut = match spawn_actor_res.flatten() {
-        Ok(rx) => rx,
-        Err(err) => {
+    let actor_done_fut = match spawn_actor_res {
+        Ok(Ok(rx)) => rx,
+        Ok(Err(err)) | Err(err) => {
             let _ = RecordingEvent::Failed { error: err.clone() }.emit(&app);
 
             let mut dialog = MessageDialogBuilder::new(
diff --git a/crates/recording/src/output_pipeline/core.rs b/crates/recording/src/output_pipeline/core.rs
index f0e1f047c7..31bb91ecc4 100644
--- a/crates/recording/src/output_pipeline/core.rs
+++ b/crates/recording/src/output_pipeline/core.rs
@@ -219,10 +219,11 @@ impl OutputPipelineBuilder> {
         Ok(OutputPipeline {
             path,
             first_timestamp_rx: first_rx,
-            stop_token: Some(stop_token.drop_guard()),
+            stop_token: Some(stop_token.clone().drop_guard()),
             video_info: Some(video_info),
             done_fut: done_rx,
             pause_flag,
+            cancel_token: stop_token,
         })
     }
 }
@@ -272,10 +273,11 @@ impl OutputPipelineBuilder {
         Ok(OutputPipeline {
             path,
             first_timestamp_rx: first_rx,
-            stop_token: Some(stop_token.drop_guard()),
+            stop_token: Some(stop_token.clone().drop_guard()),
             video_info: None,
             done_fut: done_rx,
             pause_flag,
+            cancel_token: stop_token,
         })
     }
 }
@@ -559,6 +561,7 @@ pub struct OutputPipeline {
     video_info: Option,
     done_fut: DoneFut,
     pause_flag: Arc,
+    cancel_token: CancellationToken,
 }
 
 pub struct FinishedOutputPipeline {
@@ -614,6 +617,14 @@ impl OutputPipeline {
     pub fn done_fut(&self) -> DoneFut {
         self.done_fut.clone()
     }
+
+    pub fn cancel_token(&self) -> CancellationToken {
+        self.cancel_token.clone()
+    }
+
+    pub fn cancel(&self) {
+        self.cancel_token.cancel();
+    }
 }
 
 pub struct ChannelVideoSourceConfig {
diff --git a/crates/recording/src/sources/audio_mixer.rs b/crates/recording/src/sources/audio_mixer.rs
index b3adbf9a2b..218f2ae68f 100644
--- a/crates/recording/src/sources/audio_mixer.rs
+++ b/crates/recording/src/sources/audio_mixer.rs
@@ -7,8 +7,10 @@ use std::{
         Arc,
         atomic::{AtomicBool, Ordering},
     },
-    time::{Duration, Instant},
+    time::Duration,
 };
+#[cfg(not(any(target_os = "macos", windows)))]
+use std::time::Instant;
 use tracing::{debug, info};
 
 use crate::output_pipeline::AudioFrame;
@@ -238,43 +240,10 @@ impl AudioMixer {
     fn buffer_sources(&mut self, now: Timestamp) {
         for source in &mut self.sources {
             let rate = source.info.rate();
-            let buffer_timeout = source.buffer_timeout;
+            let _buffer_timeout = source.buffer_timeout;
 
-            if let Some(last) = source.buffer_last {
-                let last_end = last.0 + last.1;
-                if let Some(elapsed_since_last) = now
-                    .duration_since(self.timestamps)
-                    .checked_sub(last_end.duration_since(self.timestamps))
-                {
-                    let mut remaining = elapsed_since_last;
-
-                    while remaining > buffer_timeout {
-                        let chunk_samples = samples_for_timeout(rate, buffer_timeout);
-                        let frame_duration = duration_from_samples(chunk_samples, rate);
-
-                        let mut frame = ffmpeg::frame::Audio::new(
-                            source.info.sample_format,
-                            chunk_samples,
-                            source.info.channel_layout(),
-                        );
-                        frame.set_rate(source.info.rate() as u32);
-
-                        for i in 0..frame.planes() {
-                            frame.data_mut(i).fill(0);
-                        }
-
-                        let timestamp = last_end + (elapsed_since_last - remaining);
-                        source.buffer_last = Some((timestamp, frame_duration));
-                        source.buffer.push_back(AudioFrame::new(frame, timestamp));
-
-                        if frame_duration.is_zero() {
-                            break;
-                        }
-
-                        remaining = remaining.saturating_sub(frame_duration);
-                    }
-                }
-            }
+            // Do not inject silence based on wall-clock pacing. We only bridge actual gaps
+            // when a new frame arrives (below), to keep emission data-driven.
 
             while let Ok(Some(AudioFrame {
                 inner: frame,
diff --git a/crates/recording/src/studio_recording.rs b/crates/recording/src/studio_recording.rs
index bb06f96cfd..092b1cdc65 100644
--- a/crates/recording/src/studio_recording.rs
+++ b/crates/recording/src/studio_recording.rs
@@ -289,6 +289,28 @@ impl Pipeline {
             futures.push(system_audio.done_fut());
         }
 
+        // Ensure non-video pipelines stop promptly when the video pipeline completes
+        {
+            let mic_cancel = self.microphone.as_ref().map(|p| p.cancel_token());
+            let cam_cancel = self.camera.as_ref().map(|p| p.cancel_token());
+            let sys_cancel = self.system_audio.as_ref().map(|p| p.cancel_token());
+
+            let screen_done = self.screen.done_fut();
+            tokio::spawn(async move {
+                // When screen (video) finishes, cancel the other pipelines
+                let _ = screen_done.await;
+                if let Some(token) = mic_cancel.as_ref() {
+                    token.cancel();
+                }
+                if let Some(token) = cam_cancel.as_ref() {
+                    token.cancel();
+                }
+                if let Some(token) = sys_cancel.as_ref() {
+                    token.cancel();
+                }
+            });
+        }
+
         tokio::spawn(async move {
             while let Some(res) = futures.next().await {
                 if let Err(err) = res

From 21f7536a8f38e945a922886715fe819c31dd0d56 Mon Sep 17 00:00:00 2001
From: Richie McIlroy <33632126+richiemcilroy@users.noreply.github.com>
Date: Wed, 29 Oct 2025 14:05:10 -0700
Subject: [PATCH 2/2] fmt

---
 crates/recording/src/sources/audio_mixer.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crates/recording/src/sources/audio_mixer.rs b/crates/recording/src/sources/audio_mixer.rs
index 218f2ae68f..976fffbda7 100644
--- a/crates/recording/src/sources/audio_mixer.rs
+++ b/crates/recording/src/sources/audio_mixer.rs
@@ -1,6 +1,8 @@
 use cap_media_info::AudioInfo;
 use cap_timestamp::{Timestamp, Timestamps};
 use futures::channel::{mpsc, oneshot};
+#[cfg(not(any(target_os = "macos", windows)))]
+use std::time::Instant;
 use std::{
     collections::VecDeque,
     sync::{
@@ -9,8 +11,6 @@ use std::{
         Arc,
         atomic::{AtomicBool, Ordering},
     },
     time::Duration,
 };
-#[cfg(not(any(target_os = "macos", windows)))]
-use std::time::Instant;
 use tracing::{debug, info};
 
 use crate::output_pipeline::AudioFrame;