diff --git a/Cargo.lock b/Cargo.lock
index e62bb42b03..0ebeff98ee 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1259,6 +1259,7 @@ dependencies = [
  "cap-audio",
  "cap-camera",
  "cap-camera-ffmpeg",
+ "cap-camera-windows",
  "cap-cursor-capture",
  "cap-cursor-info",
  "cap-enc-avfoundation",
diff --git a/crates/camera-directshow/Cargo.toml b/crates/camera-directshow/Cargo.toml
index b9d780b070..685da4c6f9 100644
--- a/crates/camera-directshow/Cargo.toml
+++ b/crates/camera-directshow/Cargo.toml
@@ -17,6 +17,7 @@ windows = { workspace = true, features = [
     "Win32_System_Com_StructuredStorage",
     "Win32_System_Ole",
     "Win32_System_Variant",
+    "Win32_System_Performance",
     "Win32_Media_KernelStreaming",
 ] }
diff --git a/crates/camera-ffmpeg/examples/cli.rs b/crates/camera-ffmpeg/examples/cli.rs
index 21183ee09d..adfb9074d8 100644
--- a/crates/camera-ffmpeg/examples/cli.rs
+++ b/crates/camera-ffmpeg/examples/cli.rs
@@ -23,7 +23,7 @@ fn main() {
 
     let _handle = selected_camera
         .start_capturing(selected_format.0, |frame| {
-            let Ok(ff_frame) = frame.to_ffmpeg() else {
+            let Ok(ff_frame) = frame.as_ffmpeg() else {
                 eprintln!("Failed to convert frame to FFmpeg");
                 return;
             };
diff --git a/crates/camera-ffmpeg/src/lib.rs b/crates/camera-ffmpeg/src/lib.rs
index 58b41c4e1d..c822191664 100644
--- a/crates/camera-ffmpeg/src/lib.rs
+++ b/crates/camera-ffmpeg/src/lib.rs
@@ -11,5 +11,5 @@ pub use windows::*;
 pub trait CapturedFrameExt {
     /// Creates an ffmpeg video frame from the native frame.
     /// Only size, format, and data are set.
-    fn to_ffmpeg(&self) -> Result;
+    fn as_ffmpeg(&self) -> Result;
 }
diff --git a/crates/camera-ffmpeg/src/macos.rs b/crates/camera-ffmpeg/src/macos.rs
index 76a4ec9716..1ebd280172 100644
--- a/crates/camera-ffmpeg/src/macos.rs
+++ b/crates/camera-ffmpeg/src/macos.rs
@@ -5,7 +5,7 @@ use cidre::*;
 use crate::CapturedFrameExt;
 
 #[derive(thiserror::Error, Debug)]
-pub enum ToFfmpegError {
+pub enum AsFFmpegError {
     #[error("Unsupported media subtype '{0}'")]
     UnsupportedSubType(String),
     #[error("{0}")]
@@ -13,7 +13,7 @@ pub enum ToFfmpegError {
 }
 
 impl CapturedFrameExt for CapturedFrame {
-    fn to_ffmpeg(&self) -> Result {
+    fn as_ffmpeg(&self) -> Result {
         let native = self.native().clone();
 
         let width = native.image_buf().width();
@@ -112,7 +112,7 @@ impl CapturedFrameExt for CapturedFrame {
                 ff_frame
             }
             format => {
-                return Err(ToFfmpegError::UnsupportedSubType(format.to_string()));
+                return Err(AsFFmpegError::UnsupportedSubType(format.to_string()));
             }
         };
 
diff --git a/crates/camera-ffmpeg/src/windows.rs b/crates/camera-ffmpeg/src/windows.rs
index 389eeb8e21..087de07f40 100644
--- a/crates/camera-ffmpeg/src/windows.rs
+++ b/crates/camera-ffmpeg/src/windows.rs
@@ -5,18 +5,18 @@ use ffmpeg::{format::Pixel, frame::Video as FFVideo};
 use crate::CapturedFrameExt;
 
 #[derive(thiserror::Error, Debug)]
-pub enum ToFfmpegError {
+pub enum AsFFmpegError {
     #[error("FailedToGetBytes: {0}")]
     FailedToGetBytes(windows_core::Error),
 }
 
 impl CapturedFrameExt for CapturedFrame {
-    fn to_ffmpeg(&self) -> Result {
+    fn as_ffmpeg(&self) -> Result {
         let native = self.native();
 
         let width = native.width;
         let height = native.height;
 
-        let bytes = native.bytes().map_err(ToFfmpegError::FailedToGetBytes)?;
+        let bytes = native.bytes().map_err(AsFFmpegError::FailedToGetBytes)?;
 
         Ok(match native.pixel_format {
             PixelFormat::YUV420P => {
@@ -79,13 +79,18 @@ impl CapturedFrameExt for CapturedFrame {
                 ff_frame
             }
             PixelFormat::ARGB => {
-                let mut ff_frame = FFVideo::new(Pixel::ARGB, width as u32, height as u32);
+                let mut ff_frame = FFVideo::new(
+                    // Looks odd, but Windows delivers these frames in BGRA byte order
+                    Pixel::BGRA,
+                    width as u32,
+                    height as u32,
+                );
 
                 let stride = ff_frame.stride(0);
 
                 for y in 0..height {
                     let row_width = width * 4;
-                    let src_row = &bytes[y * row_width..];
+                    let src_row = &bytes[(height - y - 1) * row_width..];
                     let dest_row = &mut ff_frame.data_mut(0)[y * stride..];
 
                     dest_row[0..row_width].copy_from_slice(&src_row[0..row_width]);
@@ -115,7 +120,7 @@ impl CapturedFrameExt for CapturedFrame {
 
                 for y in 0..height {
                     let row_width = width * 4;
-                    let src_row = &bytes[y * row_width..];
+                    let src_row = &bytes[(height - y - 1) * row_width..];
                     let dest_row = &mut ff_frame.data_mut(0)[y * stride..];
 
                     dest_row[0..row_width].copy_from_slice(&src_row[0..row_width]);
diff --git a/crates/recording/Cargo.toml b/crates/recording/Cargo.toml
index 746448c129..d46cbfe883 100644
--- a/crates/recording/Cargo.toml
+++ b/crates/recording/Cargo.toml
@@ -61,6 +61,7 @@ cap-enc-avfoundation = { path = "../enc-avfoundation" }
 cap-enc-mediafoundation = { path = "../enc-mediafoundation" }
 cap-mediafoundation-ffmpeg = { path = "../mediafoundation-ffmpeg" }
 cap-mediafoundation-utils = { path = "../mediafoundation-utils" }
+cap-camera-windows = { path = "../camera-windows" }
 windows = { workspace = true, features = [
     "Win32_Foundation",
     "Win32_Graphics_Gdi",
diff --git a/crates/recording/examples/recording-cli.rs b/crates/recording/examples/recording-cli.rs
index 01c57e6af1..93178256f3 100644
--- a/crates/recording/examples/recording-cli.rs
+++ b/crates/recording/examples/recording-cli.rs
@@ -25,35 +25,39 @@ pub async fn main() {
 
     info!("Recording to directory '{}'", dir.path().display());
 
-    // let camera_feed = CameraFeed::spawn(CameraFeed::default());
+    let camera_info = cap_camera::list_cameras()
+        .find(|c| c.display_name().contains("NVIDIA"))
+        .unwrap();
 
-    // camera_feed
-    //     .ask(camera::SetInput {
-    //         id: DeviceOrModelID::from_info(&cap_camera::list_cameras().next().unwrap()),
-    //     })
-    //     .await
-    //     .unwrap()
-    //     .await
-    //     .unwrap();
+    let camera_feed = CameraFeed::spawn(CameraFeed::default());
 
-    let (error_tx, _) = flume::bounded(1);
-    let mic_feed = MicrophoneFeed::spawn(MicrophoneFeed::new(error_tx));
-
-    mic_feed
-        .ask(microphone::SetInput {
-            label:
-            // MicrophoneFeed::list()
-            //     .into_iter()
-            //     .find(|(k, _)| k.contains("Focusrite"))
-            MicrophoneFeed::default()
-                .map(|v| v.0)
-                .unwrap(),
+    camera_feed
+        .ask(feeds::camera::SetInput {
+            id: feeds::camera::DeviceOrModelID::from_info(&camera_info),
         })
         .await
         .unwrap()
         .await
         .unwrap();
 
+    // let (error_tx, _) = flume::bounded(1);
+    // let mic_feed = MicrophoneFeed::spawn(MicrophoneFeed::new(error_tx));
+
+    // mic_feed
+    //     .ask(microphone::SetInput {
+    //         label:
+    //         // MicrophoneFeed::list()
+    //         //     .into_iter()
+    //         //     .find(|(k, _)| k.contains("Focusrite"))
+    //         MicrophoneFeed::default()
+    //             .map(|v| v.0)
+    //             .unwrap(),
+    //     })
+    //     .await
+    //     .unwrap()
+    //     .await
+    //     .unwrap();
+
     tokio::time::sleep(Duration::from_millis(10)).await;
 
     let (handle, _ready_rx) = studio_recording::Actor::builder(
@@ -63,9 +67,9 @@ pub async fn main() {
         },
     )
     .with_system_audio(true)
-    // .with_mic_feed(std::sync::Arc::new(
-    //     mic_feed.ask(microphone::Lock).await.unwrap(),
-    // ))
+    .with_camera_feed(std::sync::Arc::new(
+        camera_feed.ask(feeds::camera::Lock).await.unwrap(),
+    ))
     .build()
     .await
     .unwrap();
diff --git a/crates/recording/src/feeds/camera.rs b/crates/recording/src/feeds/camera.rs
index b805497a84..a3b2080528 100644
--- a/crates/recording/src/feeds/camera.rs
+++ b/crates/recording/src/feeds/camera.rs
@@ -16,7 +16,7 @@ use std::{
     time::Duration,
 };
 use tokio::{runtime::Runtime, sync::oneshot, task::LocalSet};
-use tracing::{debug, error, trace, warn};
+use tracing::{debug, error, info, trace, warn};
 
 const CAMERA_INIT_TIMEOUT: Duration = Duration::from_secs(4);
 
@@ -59,6 +59,12 @@ impl OpenState {
         if let Some(connecting) = &self.connecting
             && id == connecting.id
         {
+            if let Some(attached) = self.attached.take() {
+                let _ = attached.done_tx.send(());
+            }
+
+            trace!("Attaching new camera");
+
             self.attached = Some(AttachedState {
                 id,
                 camera_info: data.camera_info,
@@ -256,6 +262,7 @@ async fn setup_camera(
     });
 
     let format = ideal_formats.swap_remove(0);
+    let frame_rate = format.frame_rate() as u32;
 
     let (ready_tx, ready_rx) = oneshot::channel();
 
@@ -263,7 +270,7 @@ async fn setup_camera(
 
     let capture_handle = camera
         .start_capturing(format.clone(), move |frame| {
-            let Ok(mut ff_frame) = frame.to_ffmpeg() else {
+            let Ok(mut ff_frame) = frame.as_ffmpeg() else {
                 return;
             };
 
@@ -375,9 +382,15 @@ impl Message for CameraFeed {
                 }
             };
 
+            trace!("Waiting for camera to be done");
+
             let _ = done_rx.recv();
 
+            trace!("Stopping capture of {:?}", &id);
+
             let _ = handle.stop_capturing();
+
+            info!("Stopped capture of {:?}", &id);
         })
     });
 
diff --git a/packages/ui-solid/src/auto-imports.d.ts b/packages/ui-solid/src/auto-imports.d.ts
index 424407d67e..03f8b0b6b0 100644
--- a/packages/ui-solid/src/auto-imports.d.ts
+++ b/packages/ui-solid/src/auto-imports.d.ts
@@ -53,7 +53,7 @@ declare global {
   const IconCapSettings: typeof import('~icons/cap/settings.jsx')['default']
   const IconCapShadow: typeof import('~icons/cap/shadow.jsx')['default']
   const IconCapSquare: typeof import('~icons/cap/square.jsx')['default']
-  const IconCapStopCircle: typeof import("~icons/cap/stop-circle.jsx")["default"]
+  const IconCapStopCircle: typeof import('~icons/cap/stop-circle.jsx')['default']
   const IconCapTrash: typeof import('~icons/cap/trash.jsx')['default']
   const IconCapUndo: typeof import('~icons/cap/undo.jsx')['default']
   const IconCapUpload: typeof import("~icons/cap/upload.jsx")["default"]
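
Context for the windows.rs hunk above: Windows RGB32/ARGB capture buffers carry BGRA byte order and are usually stored bottom-up, which is why the patch maps the frame to Pixel::BGRA and reads source rows as height - y - 1. The following is a minimal sketch of that row flip in isolation; it assumes the same `ffmpeg` crate alias used by windows.rs (an ffmpeg-next style API) and a tightly packed buffer of height * width * 4 bytes, and the function name is illustrative rather than part of the crate.

use ffmpeg::{format::Pixel, frame::Video as FFVideo};

// Illustrative only: copy a bottom-up BGRA buffer into a top-down ffmpeg frame.
// `bytes` is assumed to hold `height` rows of `width * 4` bytes, bottom row first.
fn bottom_up_bgra_to_frame(bytes: &[u8], width: usize, height: usize) -> FFVideo {
    let mut ff_frame = FFVideo::new(Pixel::BGRA, width as u32, height as u32);
    let stride = ff_frame.stride(0);
    let row_width = width * 4;

    for y in 0..height {
        // Source row `height - y - 1` becomes destination row `y`.
        let src_row = &bytes[(height - y - 1) * row_width..][..row_width];
        let dest_row = &mut ff_frame.data_mut(0)[y * stride..][..row_width];
        dest_row.copy_from_slice(src_row);
    }

    ff_frame
}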
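
The feeds/camera.rs hunks also add a shutdown handshake: when a new camera attaches while one is already attached, the old attachment's done_tx is signalled so the task blocked on done_rx.recv() can stop capturing and log the result. A rough sketch of that pattern, using std::sync::mpsc and a placeholder capture handle as stand-ins for the crate's actual channel and handle types:

use std::sync::mpsc;
use std::thread;

// Placeholder for the real capture handle; only stop_capturing() matters here.
struct CaptureHandle;

impl CaptureHandle {
    fn stop_capturing(self) {
        println!("capture stopped");
    }
}

fn main() {
    let (done_tx, done_rx) = mpsc::channel::<()>();
    let handle = CaptureHandle;

    // The capture side blocks until it is told the camera is done, then stops.
    let worker = thread::spawn(move || {
        let _ = done_rx.recv();
        handle.stop_capturing();
    });

    // The feed side signals the old attachment when a new camera replaces it.
    let _ = done_tx.send(());
    worker.join().unwrap();
}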