diff --git a/.gitignore b/.gitignore
index 8d420409c0..11f7115d03 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,6 +34,7 @@ pnpm-lock.yaml
 .output
 .vinxi
 native-deps*
+apps/desktop/recordings-from-AppData-Roaming/
 apps/storybook/storybook-static
 .tinyb
diff --git a/apps/desktop/register_protocol.ps1 b/apps/desktop/register_protocol.ps1
new file mode 100644
index 0000000000..70c1b37c1a
--- /dev/null
+++ b/apps/desktop/register_protocol.ps1
@@ -0,0 +1,21 @@
+$ExePath = Join-Path $PSScriptRoot "../../target/debug/cap-desktop.exe"
+$Protocol = "cap"
+
+if (-not (Test-Path $ExePath)) {
+    Write-Host "Error: cap-desktop.exe not found at $ExePath" -ForegroundColor Red
+    exit
+}
+
+Write-Host "Registering $($Protocol):// protocol to: $ExePath"
+
+# HKEY_CLASSES_ROOT
+$RegPath = "HKCU:\Software\Classes\$Protocol"
+if (-not (Test-Path $RegPath)) { New-Item -Path $RegPath -Force }
+New-ItemProperty -Path $RegPath -Name "URL Protocol" -Value "" -PropertyType String -Force
+
+$ShellPath = "$RegPath\shell\open\command"
+if (-not (Test-Path $ShellPath)) { New-Item -Path $ShellPath -Force }
+Set-Item -Path $ShellPath -Value "`"$ExePath`" `"%1`""
+
+Write-Host "✅ Protocol $($Protocol):// registered successfully!" -ForegroundColor Green
+Write-Host "You can now test it by running: start $($Protocol)://record"
diff --git a/apps/desktop/set_build_env.sh b/apps/desktop/set_build_env.sh
new file mode 100644
index 0000000000..55f91f179f
--- /dev/null
+++ b/apps/desktop/set_build_env.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+echo "Configuring Cap Desktop Build Environment..."
+
+# 1. Disable VCPKG to prevent conflicts
+unset VCPKG_ROOT
+echo " - Unset VCPKG_ROOT"
+
+# 2. Add CMake to PATH (VS Build Tools)
+export PATH="/c/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/Common7/IDE/CommonExtensions/Microsoft/CMake/CMake/bin:$PATH"
+
+# 3. Configure FFmpeg 6.1
+export FFMPEG_DIR="C:/Tools/ffmpeg-6.1-shared"
+export PATH="$FFMPEG_DIR/bin:$PATH"
+export BINDGEN_EXTRA_CLANG_ARGS="-I$FFMPEG_DIR/include"
+echo " - Configured FFmpeg 6.1 at $FFMPEG_DIR"
+
+# 4. Configure Clang
+export LIBCLANG_PATH="C:/Program Files/LLVM/bin"
+echo " - Configured Clang at $LIBCLANG_PATH"
+
+# 5. Ensure DLLs are present in target (Fixes STATUS_DLL_NOT_FOUND)
+TARGET_DIR="../../../target/debug"
+if [ -d "$TARGET_DIR" ]; then
+    echo " - Copying FFmpeg DLLs to target/debug..."
+    cp "$FFMPEG_DIR/bin/"*.dll "$TARGET_DIR/" 2>/dev/null || true
+fi
+
+echo "✅ Environment Ready! You can now run 'pnpm tauri dev'."
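Reviewer note: the `cap://` protocol registered by `register_protocol.ps1` above carries plain one-word commands in the host position, unlike the existing `cap-desktop://` scheme. A minimal standalone sketch of that host-string dispatch — assuming the `url` crate and a hypothetical local `Action` enum, not the app's own types — looks like this:

```rust
// Illustrative sketch only; not the app's implementation.
use url::Url;

#[derive(Debug, PartialEq)]
enum Action {
    StartDefaultRecording,
    StopRecording,
    PauseRecording,
    ResumeRecording,
}

fn parse_cap_url(raw: &str) -> Option<Action> {
    let url = Url::parse(raw).ok()?;
    // Only the short `cap://` scheme carries one-word commands as the host.
    if url.scheme() != "cap" {
        return None;
    }
    match url.host_str() {
        Some("record") => Some(Action::StartDefaultRecording),
        Some("stop") => Some(Action::StopRecording),
        Some("pause") => Some(Action::PauseRecording),
        Some("resume") => Some(Action::ResumeRecording),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_cap_url("cap://record"), Some(Action::StartDefaultRecording));
    assert_eq!(parse_cap_url("cap://bogus"), None);
}
```

The real mapping lives in `deeplink_actions.rs` below, where these hosts resolve to `StartDefaultRecording`, `StopRecording`, `PauseRecording`, and `ResumeRecording`.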
diff --git a/apps/desktop/src-tauri/src/deeplink_actions.rs b/apps/desktop/src-tauri/src/deeplink_actions.rs index fce75b4a84..4869eabd35 100644 --- a/apps/desktop/src-tauri/src/deeplink_actions.rs +++ b/apps/desktop/src-tauri/src/deeplink_actions.rs @@ -8,6 +8,8 @@ use tracing::trace; use crate::{App, ArcLock, recording::StartRecordingInputs, windows::ShowCapWindow}; +use tauri_specta::Event; + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum CaptureMode { @@ -25,7 +27,10 @@ pub enum DeepLinkAction { capture_system_audio: bool, mode: RecordingMode, }, + StartDefaultRecording, StopRecording, + PauseRecording, + ResumeRecording, OpenEditor { project_path: PathBuf, }, @@ -87,7 +92,17 @@ impl TryFrom<&Url> for DeepLinkAction { }); } - match url.domain() { + if url.scheme() == "cap" { + return match url.host_str() { + Some("record") => Ok(Self::StartDefaultRecording), + Some("stop") => Ok(Self::StopRecording), + Some("pause") => Ok(Self::PauseRecording), + Some("resume") => Ok(Self::ResumeRecording), + _ => Err(ActionParseFromUrlError::Invalid), + }; + } + + match url.host_str() { Some(v) if v != "action" => Err(ActionParseFromUrlError::NotAction), _ => Err(ActionParseFromUrlError::Invalid), }?; @@ -143,9 +158,47 @@ impl DeepLinkAction { .await .map(|_| ()) } + DeepLinkAction::StartDefaultRecording => { + let state = app.state::>(); + let displays = cap_recording::screen_capture::list_displays(); + + if let Some((display, _)) = displays.into_iter().next() { + let capture_target = ScreenCaptureTarget::Display { id: display.id }; + let inputs = StartRecordingInputs { + mode: RecordingMode::Studio, + capture_target, + capture_system_audio: true, + organization_id: None, + }; + + crate::recording::start_recording(app.clone(), state, inputs) + .await + .map(|_| ()) + } else { + Err("No display found".to_string()) + } + } DeepLinkAction::StopRecording => { crate::recording::stop_recording(app.clone(), app.state()).await } + DeepLinkAction::PauseRecording => { + let state = app.state::>(); + let state_read = state.read().await; + if let Some(recording) = state_read.current_recording() { + recording.pause().await.map_err(|e| e.to_string())?; + crate::recording::RecordingEvent::Paused.emit(app).ok(); + } + Ok(()) + } + DeepLinkAction::ResumeRecording => { + let state = app.state::>(); + let state_read = state.read().await; + if let Some(recording) = state_read.current_recording() { + recording.resume().await.map_err(|e| e.to_string())?; + crate::recording::RecordingEvent::Resumed.emit(app).ok(); + } + Ok(()) + } DeepLinkAction::OpenEditor { project_path } => { crate::open_project_from_path(Path::new(&project_path), app.clone()) } diff --git a/apps/desktop/src-tauri/src/export.rs b/apps/desktop/src-tauri/src/export.rs index 963f1c1f2b..250fe210d6 100644 --- a/apps/desktop/src-tauri/src/export.rs +++ b/apps/desktop/src-tauri/src/export.rs @@ -367,6 +367,7 @@ pub async fn generate_export_preview( &render_constants.device, &render_constants.queue, render_constants.is_software_adapter, + render_constants.is_software_adapter, ); let frame = frame_renderer @@ -510,6 +511,7 @@ pub async fn generate_export_preview_fast( &editor.render_constants.device, &editor.render_constants.queue, editor.render_constants.is_software_adapter, + editor.render_constants.is_software_adapter, ); let frame = frame_renderer diff --git a/apps/desktop/src-tauri/src/gpu_context.rs b/apps/desktop/src-tauri/src/gpu_context.rs index 50c87ea7e2..ad90ee0519 100644 --- 
a/apps/desktop/src-tauri/src/gpu_context.rs +++ b/apps/desktop/src-tauri/src/gpu_context.rs @@ -49,41 +49,85 @@ static GPU: OnceCell> = OnceCell::const_new(); pub async fn get_shared_gpu() -> Option<&'static SharedGpuContext> { GPU.get_or_init(|| async { - let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor::default()); + let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor { + backends: wgpu::Backends::all(), + flags: wgpu::InstanceFlags::default() + | wgpu::InstanceFlags::ALLOW_UNDERLYING_NONCOMPLIANT_ADAPTER, + ..Default::default() + }); - let hardware_adapter = instance - .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::HighPerformance, - force_fallback_adapter: false, - compatible_surface: None, - }) - .await - .ok(); + let adapters = instance.enumerate_adapters(wgpu::Backends::all()); - let (adapter, is_software_adapter) = if let Some(adapter) = hardware_adapter { + for adapter in &adapters { + let info = adapter.get_info(); tracing::info!( - adapter_name = adapter.get_info().name, - adapter_backend = ?adapter.get_info().backend, + "Found GPU adapter: {} (Vendor: 0x{:04X}, Backend: {:?}, Type: {:?}, LUID: {:?})", + info.name, + info.vendor, + info.backend, + info.device_type, + info.device + ); + } + + let (adapter, is_software_adapter) = if let Some(hardware_adapter) = adapters + .iter() + .find(|a| { + let info = a.get_info(); + // Prefer discrete GPU on Dx12 if available for zero-copy + info.device_type == wgpu::DeviceType::DiscreteGpu + && info.backend == wgpu::Backend::Dx12 + && info.name != "Microsoft Basic Render Driver" + }) + .or_else(|| { + // Secondary check for any hardware GPU on Dx12 + adapters.iter().find(|a| { + let info = a.get_info(); + info.device_type != wgpu::DeviceType::Cpu + && info.backend == wgpu::Backend::Dx12 + && info.name != "Microsoft Basic Render Driver" + }) + }) + .or_else(|| { + // Tertiary: try hardware on any backend (might have been missed by Dx12) + adapters.iter().find(|a| { + let info = a.get_info(); + info.device_type != wgpu::DeviceType::Cpu + && info.name != "Microsoft Basic Render Driver" + && !info.name.contains("WARP") + }) + }) { + let info = hardware_adapter.get_info(); + tracing::info!( + adapter_name = info.name, + adapter_backend = ?info.backend, "Using hardware GPU adapter for shared context" ); - (adapter, false) + (hardware_adapter.clone(), false) } else { - tracing::warn!("No hardware GPU adapter found, attempting software fallback for shared context"); - let software_adapter = instance + tracing::warn!( + "No clear hardware GPU adapter found via enumeration, attempting fallback" + ); + let fallback_adapter = instance .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::LowPower, - force_fallback_adapter: true, + power_preference: wgpu::PowerPreference::HighPerformance, + force_fallback_adapter: false, compatible_surface: None, }) .await .ok()?; + let info = fallback_adapter.get_info(); + let is_software = info.device_type == wgpu::DeviceType::Cpu + || info.name == "Microsoft Basic Render Driver"; + tracing::info!( - adapter_name = software_adapter.get_info().name, - adapter_backend = ?software_adapter.get_info().backend, - "Using software adapter for shared context (CPU rendering - performance may be reduced)" + adapter_name = info.name, + adapter_backend = ?info.backend, + is_software = is_software, + "Using fallback GPU adapter for shared context" ); - (software_adapter, true) + (fallback_adapter, is_software) }; let (device, 
queue) = adapter diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs index 05c552a58d..64690a1d1b 100644 --- a/apps/desktop/src-tauri/src/lib.rs +++ b/apps/desktop/src-tauri/src/lib.rs @@ -246,11 +246,14 @@ impl App { } fn close_occluder_windows(&self) { - for window in self.handle.webview_windows() { - if window.0.starts_with("window-capture-occluder-") { - let _ = window.1.close(); + let handle = self.handle.clone(); + let _ = handle.clone().run_on_main_thread(move || { + for (label, window) in handle.webview_windows() { + if label.starts_with("window-capture-occluder-") { + let _ = window.destroy(); + } } - } + }); } async fn restart_mic_feed(&mut self) -> Result<(), String> { @@ -804,9 +807,15 @@ async fn cleanup_camera_window(app: AppHandle, session_id: u64) { app_state.camera_preview.pause(); if !app_state.is_recording_active_or_pending() { - let has_visible_target_overlay = app.webview_windows().iter().any(|(label, window)| { - label.starts_with("target-select-overlay-") && window.is_visible().unwrap_or(false) + let app_clone = app.clone(); + let (tx, rx) = tokio::sync::oneshot::channel(); + let _ = app.run_on_main_thread(move || { + let has_visible = app_clone.webview_windows().iter().any(|(label, window)| { + label.starts_with("target-select-overlay-") && window.is_visible().unwrap_or(false) + }); + let _ = tx.send(has_visible); }); + let has_visible_target_overlay = rx.await.unwrap_or(false); let is_camera_only_mode = recording_settings::RecordingSettingsStore::get(&app) .ok() @@ -858,9 +867,16 @@ async fn cleanup_camera_after_overlay_close(app: AppHandle, captured_session_id: return; } - let has_visible_target_overlay = app.webview_windows().iter().any(|(label, window)| { - label.starts_with("target-select-overlay-") && window.is_visible().unwrap_or(false) + let app_clone = app.clone(); + let (tx, rx) = tokio::sync::oneshot::channel(); + let _ = app.run_on_main_thread(move || { + let has_visible = app_clone.webview_windows().iter().any(|(label, window)| { + label.starts_with("target-select-overlay-") && window.is_visible().unwrap_or(false) + }); + let _ = tx.send(has_visible); }); + let has_visible_target_overlay = rx.await.unwrap_or(false); + if has_visible_target_overlay { return; } @@ -3277,36 +3293,46 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { } match window_id { CapWindowId::Main => { - let app = app.clone(); - - for (id, window) in app.webview_windows() { - if let Ok(CapWindowId::TargetSelectOverlay { .. }) = - CapWindowId::from_str(&id) - { - let _ = window.hide(); - } + let app_handle = app.clone(); + + { + let app = app_handle.clone(); + let _ = app.clone().run_on_main_thread(move || { + for (id, window) in app.webview_windows() { + if let Ok(CapWindowId::TargetSelectOverlay { .. 
}) = + CapWindowId::from_str(&id) + { + let _ = window.hide(); + } + } + }); } - tokio::spawn(async move { - let state = app.state::>(); - let app_state = &mut *state.write().await; - - let camera_window_open = - CapWindowId::Camera.get(&app).is_some(); - - if !app_state.is_recording_active_or_pending() - && !camera_window_open - && !app_state.camera_in_use - { - let _ = - app_state.mic_feed.ask(microphone::RemoveInput).await; - let _ = app_state - .camera_feed - .ask(feeds::camera::RemoveInput) - .await; - - app_state.selected_mic_label = None; - app_state.selected_camera_id = None; + tokio::spawn({ + let app = app_handle.clone(); + async move { + let state = app.state::>(); + let mut app_state = state.write().await; + + let camera_window_open = + CapWindowId::Camera.get(&app).is_some(); + + if !app_state.is_recording_active_or_pending() + && !camera_window_open + && !app_state.camera_in_use + { + let _ = app_state + .mic_feed + .ask(microphone::RemoveInput) + .await; + let _ = app_state + .camera_feed + .ask(feeds::camera::RemoveInput) + .await; + + app_state.selected_mic_label = None; + app_state.selected_camera_id = None; + } } }); } @@ -3316,7 +3342,10 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { tokio::spawn(EditorInstances::remove(window.clone())); - restore_main_windows_if_no_editors(app); + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + restore_main_windows_if_no_editors(&app_handle); + }); } CapWindowId::ScreenshotEditor { id } => { let window_ids = @@ -3325,42 +3354,51 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { tokio::spawn(ScreenshotEditorInstances::remove(window.clone())); - restore_main_windows_if_no_editors(app); + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + restore_main_windows_if_no_editors(&app_handle); + }); } CapWindowId::Settings => { - for (label, window) in app.webview_windows() { - if let Ok(id) = CapWindowId::from_str(&label) - && matches!( - id, - CapWindowId::TargetSelectOverlay { .. } - | CapWindowId::Main - | CapWindowId::Camera - ) - { - let _ = window.show(); + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + for (label, window) in app_handle.webview_windows() { + if let Ok(id) = CapWindowId::from_str(&label) + && matches!( + id, + CapWindowId::TargetSelectOverlay { .. } + | CapWindowId::Main + | CapWindowId::Camera + ) + { + let _ = window.show(); + } } - } - #[cfg(target_os = "windows")] - if !has_open_editor_window(app) { - reopen_main_window(app); - } + #[cfg(target_os = "windows")] + if !has_open_editor_window(&app_handle) { + reopen_main_window(&app_handle); + } + }); return; } CapWindowId::Upgrade | CapWindowId::ModeSelect => { - for (label, window) in app.webview_windows() { - if let Ok(id) = CapWindowId::from_str(&label) - && matches!( - id, - CapWindowId::TargetSelectOverlay { .. } - | CapWindowId::Main - | CapWindowId::Camera - ) - { - let _ = window.show(); + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + for (label, window) in app_handle.webview_windows() { + if let Ok(id) = CapWindowId::from_str(&label) + && matches!( + id, + CapWindowId::TargetSelectOverlay { .. 
} + | CapWindowId::Main + | CapWindowId::Camera + ) + { + let _ = window.show(); + } } - } + }); return; } CapWindowId::TargetSelectOverlay { display_id } => { @@ -3396,16 +3434,26 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { }; } - if let Some(settings) = GeneralSettingsStore::get(app).unwrap_or(None) - && settings.hide_dock_icon - && app - .webview_windows() - .keys() - .all(|label| !CapWindowId::from_str(label).unwrap().activates_dock()) + #[cfg(target_os = "macos")] { - #[cfg(target_os = "macos")] - app.set_activation_policy(tauri::ActivationPolicy::Accessory) - .ok(); + if let Some(settings) = GeneralSettingsStore::get(app).unwrap_or(None) + && settings.hide_dock_icon + { + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + let windows = app_handle.webview_windows(); + let no_dock_windows = windows.keys().all(|label| { + CapWindowId::from_str(label) + .map(|id| !id.activates_dock()) + .unwrap_or(true) + }); + + if no_dock_windows { + let _ = app_handle + .set_activation_policy(tauri::ActivationPolicy::Accessory); + } + }); + } } } #[cfg(target_os = "macos")] @@ -3413,13 +3461,16 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { let window_id = CapWindowId::from_str(label); if matches!(window_id, Ok(CapWindowId::Upgrade)) { - for (label, window) in app.webview_windows() { - if let Ok(id) = CapWindowId::from_str(&label) - && matches!(id, CapWindowId::TargetSelectOverlay { .. }) - { - let _ = window.hide(); + let app_handle = app.clone(); + let _ = app_handle.clone().run_on_main_thread(move || { + for (label, window) in app_handle.webview_windows() { + if let Ok(id) = CapWindowId::from_str(&label) + && matches!(id, CapWindowId::TargetSelectOverlay { .. 
}) + { + let _ = window.hide(); + } } - } + }); } if *focused diff --git a/apps/desktop/src-tauri/src/target_select_overlay.rs b/apps/desktop/src-tauri/src/target_select_overlay.rs index e4d8a38326..3ee423c393 100644 --- a/apps/desktop/src-tauri/src/target_select_overlay.rs +++ b/apps/desktop/src-tauri/src/target_select_overlay.rs @@ -89,18 +89,23 @@ pub async fn open_target_select_overlays( .or_else(|| Display::get_containing_cursor().map(|d| d.id())) .unwrap_or_else(|| Display::primary().id()); - for (id, window) in app.webview_windows() { - if let Ok(CapWindowId::TargetSelectOverlay { - display_id: existing_id, - }) = CapWindowId::from_str(&id) - && !display_ids - .iter() - .any(|display_id| display_id == &existing_id) - { - let _ = window.hide(); - state.destroy(&existing_id, app.global_shortcut()); + let app_clone = app.clone(); + let display_ids_clone = display_ids.clone(); + let _ = app.run_on_main_thread(move || { + let state = app_clone.state::(); + for (id, window) in app_clone.webview_windows() { + if let Ok(CapWindowId::TargetSelectOverlay { + display_id: existing_id, + }) = CapWindowId::from_str(&id) + && !display_ids_clone + .iter() + .any(|display_id| display_id == &existing_id) + { + let _ = window.hide(); + state.destroy(&existing_id, app_clone.global_shortcut()); + } } - } + }); for display_id in &display_ids { let should_focus = display_id == &focus_display_id; @@ -289,23 +294,28 @@ pub async fn update_camera_overlay_bounds( #[specta::specta] #[tauri::command] -#[instrument(skip(app, state))] +#[instrument(skip(app, _state))] pub async fn close_target_select_overlays( app: AppHandle, - state: tauri::State<'_, WindowFocusManager>, + _state: tauri::State<'_, WindowFocusManager>, ) -> Result<(), String> { - let mut closed_display_ids = Vec::new(); + let app_clone = app.clone(); + let _ = app.run_on_main_thread(move || { + let mut closed_display_ids: Vec = Vec::new(); - for (id, window) in app.webview_windows() { - if let Ok(CapWindowId::TargetSelectOverlay { display_id }) = CapWindowId::from_str(&id) { - let _ = window.hide(); - closed_display_ids.push(display_id); + for (id, window) in app_clone.webview_windows() { + if let Ok(CapWindowId::TargetSelectOverlay { display_id }) = CapWindowId::from_str(&id) + { + let _ = window.hide(); + closed_display_ids.push(display_id); + } } - } - for display_id in closed_display_ids { - state.destroy(&display_id, app.global_shortcut()); - } + let state = app_clone.state::(); + for display_id in closed_display_ids { + state.destroy(&display_id, app_clone.global_shortcut()); + } + }); Ok(()) } @@ -418,28 +428,52 @@ impl WindowFocusManager { let mut main_window_was_seen = false; loop { - let cap_main = CapWindowId::Main.get(app); - let cap_settings = CapWindowId::Settings.get(app); + let app_clone = app.clone(); + let window_clone = window.clone(); + let (tx, rx) = tokio::sync::oneshot::channel(); - let main_window_available = cap_main.is_some(); - let settings_window_available = cap_settings.is_some(); + let mut main_window_was_seen_inner = main_window_was_seen; + let _ = app.run_on_main_thread(move || { + let cap_main = CapWindowId::Main.get(&app_clone); + let cap_settings = CapWindowId::Settings.get(&app_clone); - if main_window_available || settings_window_available { - main_window_was_seen = true; - } + let main_window_available = cap_main.is_some(); + let settings_window_available = cap_settings.is_some(); - if main_window_was_seen && !main_window_available && !settings_window_available - { - window.hide().ok(); - break; - } + let mut 
should_hide = false; + let mut should_refocus = false; + + if main_window_available || settings_window_available { + main_window_was_seen_inner = true; + } + + if main_window_was_seen_inner + && !main_window_available + && !settings_window_available + { + should_hide = true; + } + + #[cfg(windows)] + if let Some(cap_main) = cap_main { + let is_focused = cap_main.is_focused().ok().unwrap_or_default() + || window_clone.is_focused().unwrap_or_default(); + + if !is_focused { + should_refocus = true; + } + } - #[cfg(windows)] - if let Some(cap_main) = cap_main { - let should_refocus = cap_main.is_focused().ok().unwrap_or_default() - || window.is_focused().unwrap_or_default(); + let _ = tx.send((main_window_was_seen_inner, should_hide, should_refocus)); + }); - if !should_refocus { + if let Ok((new_main_seen, should_hide, should_refocus)) = rx.await { + main_window_was_seen = new_main_seen; + if should_hide { + window.hide().ok(); + break; + } + if should_refocus { window.set_focus().ok(); } } diff --git a/apps/desktop/src-tauri/src/tray.rs b/apps/desktop/src-tauri/src/tray.rs index c0a3c3f6e6..cff27ac498 100644 --- a/apps/desktop/src-tauri/src/tray.rs +++ b/apps/desktop/src-tauri/src/tray.rs @@ -331,7 +331,7 @@ fn get_current_mode(app: &AppHandle) -> RecordingMode { } fn is_setup_window_open(app: &AppHandle) -> bool { - app.webview_windows().contains_key("setup") + app.get_webview_window("setup").is_some() } fn create_mode_submenu(app: &AppHandle) -> tauri::Result> { diff --git a/apps/desktop/src-tauri/src/windows.rs b/apps/desktop/src-tauri/src/windows.rs index d4ac668ca1..51a6fdca8b 100644 --- a/apps/desktop/src-tauri/src/windows.rs +++ b/apps/desktop/src-tauri/src/windows.rs @@ -83,16 +83,21 @@ fn is_system_dark_mode() -> bool { } fn hide_recording_windows(app: &AppHandle) { - for (label, window) in app.webview_windows() { - if let Ok(id) = CapWindowId::from_str(&label) - && matches!( - id, - CapWindowId::TargetSelectOverlay { .. } | CapWindowId::Main | CapWindowId::Camera - ) - { - let _ = window.hide(); + let app_clone = app.clone(); + let _ = app.clone().run_on_main_thread(move || { + for (label, window) in app_clone.webview_windows() { + if let Ok(id) = CapWindowId::from_str(&label) + && matches!( + id, + CapWindowId::TargetSelectOverlay { .. 
} + | CapWindowId::Main + | CapWindowId::Camera + ) + { + let _ = window.hide(); + } } - } + }); } async fn cleanup_camera_window( @@ -405,6 +410,7 @@ impl CapWindowId { } } + #[allow(dead_code)] pub fn activates_dock(&self) -> bool { matches!( self, @@ -2092,14 +2098,15 @@ fn should_protect_window(app: &AppHandle, window_title: &str) -> bool { #[specta::specta] #[instrument(skip(app))] pub fn refresh_window_content_protection(app: AppHandle) -> Result<(), String> { - for (label, window) in app.webview_windows() { - if let Ok(id) = CapWindowId::from_str(&label) { - let title = id.title(); - window - .set_content_protected(should_protect_window(&app, &title)) - .map_err(|e| e.to_string())?; + let app_clone = app.clone(); + let _ = app.run_on_main_thread(move || { + for (label, window) in app_clone.webview_windows() { + if let Ok(id) = CapWindowId::from_str(&label) { + let title = id.title(); + let _ = window.set_content_protected(should_protect_window(&app_clone, &title)); + } } - } + }); Ok(()) } diff --git a/apps/desktop/src-tauri/tauri.conf.json b/apps/desktop/src-tauri/tauri.conf.json index 691c2f0995..f3cec2cb42 100644 --- a/apps/desktop/src-tauri/tauri.conf.json +++ b/apps/desktop/src-tauri/tauri.conf.json @@ -27,10 +27,13 @@ } }, "plugins": { - "updater": { "active": false, "pubkey": "" }, + "updater": { + "active": false, + "pubkey": "" + }, "deep-link": { "desktop": { - "schemes": ["cap-desktop"] + "schemes": ["cap-desktop", "cap"] } } }, diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index 52334f0fc8..540ee10dd5 100644 --- a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -397,6 +397,7 @@ videoImportProgress: "video-import-progress" /** user-defined types **/ +export type AllGpusInfo = { gpus: GpuInfoDiag[]; primaryGpuIndex: number | null; isMultiGpuSystem: boolean; hasDiscreteGpu: boolean } export type Annotation = { id: string; type: AnnotationType; x: number; y: number; width: number; height: number; strokeColor: string; strokeWidth: number; fillColor: string; opacity: number; rotation: number; text: string | null; maskType?: MaskType | null; maskLevel?: number | null } export type AnnotationType = "arrow" | "circle" | "rectangle" | "text" | "mask" export type AppTheme = "system" | "light" | "dark" @@ -470,6 +471,7 @@ quality: number | null; */ fast: boolean | null } export type GlideDirection = "none" | "left" | "right" | "up" | "down" +export type GpuInfoDiag = { vendor: string; description: string; dedicatedVideoMemoryMb: number; adapterIndex: number; isSoftwareAdapter: boolean; isBasicRenderDriver: boolean; supportsHardwareEncoding: boolean } export type HapticPattern = "alignment" | "levelChange" | "generic" export type HapticPerformanceTime = "default" | "now" | "drawCompleted" export type Hotkey = { code: string; meta: boolean; ctrl: boolean; alt: boolean; shift: boolean } @@ -483,7 +485,6 @@ export type JsonValue = [T] export type LogicalBounds = { position: LogicalPosition; size: LogicalSize } export type LogicalPosition = { x: number; y: number } export type LogicalSize = { width: number; height: number } -export type MacOSVersionInfo = { major: number; minor: number; patch: number; displayName: string; buildNumber: string; isAppleSilicon: boolean } export type MainWindowRecordingStartBehaviour = "close" | "minimise" export type MaskKeyframes = { position?: MaskVectorKeyframe[]; size?: MaskVectorKeyframe[]; intensity?: MaskScalarKeyframe[] } export type MaskKind = "sensitive" | "highlight" @@ -527,6 +528,7 @@ 
export type RecordingStatus = "pending" | "recording" export type RecordingStopped = null export type RecordingTargetMode = "display" | "window" | "area" | "camera" export type RenderFrameEvent = { frame_number: number; fps: number; resolution_base: XY } +export type RenderingStatus = { isUsingSoftwareRendering: boolean; isUsingBasicRenderDriver: boolean; hardwareEncodingAvailable: boolean; warningMessage: string | null } export type RequestOpenRecordingPicker = { target_mode: RecordingTargetMode | null } export type RequestOpenSettings = { page: string } export type RequestScreenCapturePrewarm = { force?: boolean } @@ -549,7 +551,7 @@ export type StartRecordingInputs = { capture_target: ScreenCaptureTarget; captur export type StereoMode = "stereo" | "monoL" | "monoR" export type StudioRecordingMeta = { segment: SingleSegment } | { inner: MultipleSegments } export type StudioRecordingStatus = { status: "InProgress" } | { status: "NeedsRemux" } | { status: "Failed"; error: string } | { status: "Complete" } -export type SystemDiagnostics = { macosVersion: MacOSVersionInfo | null; availableEncoders: string[]; screenCaptureSupported: boolean; metalSupported: boolean; gpuName: string | null } +export type SystemDiagnostics = { windowsVersion: WindowsVersionInfo | null; gpuInfo: GpuInfoDiag | null; allGpus: AllGpusInfo | null; renderingStatus: RenderingStatus; availableEncoders: string[]; graphicsCaptureSupported: boolean; d3D11VideoProcessorAvailable: boolean } export type TargetUnderCursor = { display_id: DisplayId | null; window: WindowUnderCursor | null } export type TextSegment = { start: number; end: number; enabled?: boolean; content?: string; center?: XY; size?: XY; fontFamily?: string; fontSize?: number; fontWeight?: number; italic?: boolean; color?: string; fadeDuration?: number } export type TimelineConfiguration = { segments: TimelineSegment[]; zoomSegments: ZoomSegment[]; sceneSegments?: SceneSegment[]; maskSegments?: MaskSegment[]; textSegments?: TextSegment[] } @@ -568,6 +570,7 @@ export type WindowExclusion = { bundleIdentifier?: string | null; ownerName?: st export type WindowId = string export type WindowPosition = { x: number; y: number } export type WindowUnderCursor = { id: WindowId; app_name: string; bounds: LogicalBounds } +export type WindowsVersionInfo = { major: number; minor: number; build: number; displayName: string; meetsRequirements: boolean; isWindows11: boolean } export type XY = { x: T; y: T } export type ZoomMode = "auto" | { manual: { x: number; y: number } } export type ZoomSegment = { start: number; end: number; amount: number; mode: ZoomMode; glideDirection?: GlideDirection; glideSpeed?: number; instantAnimation?: boolean; edgeSnapRatio?: number } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 012869016a..20c7286fe8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -76,6 +76,7 @@ impl Renderer { &self.render_constants.device, &self.render_constants.queue, self.render_constants.is_software_adapter, + self.render_constants.is_software_adapter, ); struct PendingFrame { diff --git a/crates/mediafoundation-utils/src/lib.rs b/crates/mediafoundation-utils/src/lib.rs index 3ead730fc1..1ce3ae6224 100644 --- a/crates/mediafoundation-utils/src/lib.rs +++ b/crates/mediafoundation-utils/src/lib.rs @@ -7,7 +7,10 @@ use std::{ use windows::{ Win32::{ Media::MediaFoundation::{IMFMediaBuffer, MFSTARTUP_FULL, MFStartup}, - System::WinRT::{RO_INIT_MULTITHREADED, RoInitialize}, + System::{ + Com::{COINIT_MULTITHREADED, 
CoInitializeEx}, + WinRT::{RO_INIT_MULTITHREADED, RoInitialize}, + }, }, core::Result, }; @@ -16,10 +19,38 @@ use windows::{ pub const MF_VERSION: u32 = 131184; pub fn thread_init() { + let _ = unsafe { CoInitializeEx(None, COINIT_MULTITHREADED) }; let _ = unsafe { RoInitialize(RO_INIT_MULTITHREADED) }; let _ = unsafe { MFStartup(MF_VERSION, MFSTARTUP_FULL) }; } +pub fn thread_uninit() { + let _ = unsafe { windows::Win32::Media::MediaFoundation::MFShutdown() }; + unsafe { windows::Win32::System::WinRT::RoUninitialize() }; + unsafe { windows::Win32::System::Com::CoUninitialize() }; +} + +pub struct ThreadInitGuard; + +impl ThreadInitGuard { + pub fn new() -> Self { + thread_init(); + Self + } +} + +impl Drop for ThreadInitGuard { + fn drop(&mut self) { + thread_uninit(); + } +} + +impl Default for ThreadInitGuard { + fn default() -> Self { + Self::new() + } +} + pub trait IMFMediaBufferExt { fn lock(&self) -> Result>; fn lock_for_write(&self) -> Result>; diff --git a/crates/recording/src/instant_recording.rs b/crates/recording/src/instant_recording.rs index 5b430c8087..dd74649f69 100644 --- a/crates/recording/src/instant_recording.rs +++ b/crates/recording/src/instant_recording.rs @@ -361,7 +361,7 @@ pub async fn spawn_instant_recording_actor( let content_dir = ensure_dir(&recording_dir.join("content"))?; #[cfg(windows)] - cap_mediafoundation_utils::thread_init(); + let _mf_guard = cap_mediafoundation_utils::ThreadInitGuard::new(); let (pipeline, video_info) = match inputs.capture_target { ScreenCaptureTarget::CameraOnly => { diff --git a/crates/recording/src/output_pipeline/win.rs b/crates/recording/src/output_pipeline/win.rs index 9da08a5d48..f2fd82e7e8 100644 --- a/crates/recording/src/output_pipeline/win.rs +++ b/crates/recording/src/output_pipeline/win.rs @@ -300,7 +300,7 @@ impl Muxer for WindowsMuxer { } } - match encoder { + let res = match encoder { either::Left((mut encoder, mut muxer)) => { trace!("Running native encoder with frame pacing"); let frame_interval = Duration::from_secs_f64(1.0 / config.frame_rate as f64); @@ -459,7 +459,10 @@ impl Muxer for WindowsMuxer { Ok(()) } - } + }; + + cap_mediafoundation_utils::thread_uninit(); + res }); } @@ -805,7 +808,7 @@ impl Muxer for WindowsCameraMuxer { } } - match encoder { + let res = match encoder { either::Left((mut encoder, mut muxer)) => { info!( "Windows camera encoder started (hardware) with frame pacing: {:?} {}x{} -> NV12 {}x{} @ {}fps", @@ -988,7 +991,10 @@ impl Muxer for WindowsCameraMuxer { ); Ok(()) } - } + }; + + cap_mediafoundation_utils::thread_uninit(); + res }); } @@ -1580,7 +1586,7 @@ pub fn upload_mf_buffer_to_texture( let buffer_guard = frame .buffer .lock() - .map_err(|_| windows::core::Error::from(windows::core::HRESULT(-1)))?; + .map_err(|_| windows::core::Error::from(windows::Win32::Foundation::E_FAIL))?; let lock = buffer_guard.lock()?; let original_data = &*lock; diff --git a/crates/recording/src/output_pipeline/win_fragmented_m4s.rs b/crates/recording/src/output_pipeline/win_fragmented_m4s.rs index 007c855262..8ca66051e2 100644 --- a/crates/recording/src/output_pipeline/win_fragmented_m4s.rs +++ b/crates/recording/src/output_pipeline/win_fragmented_m4s.rs @@ -247,7 +247,8 @@ impl WindowsFragmentedM4SMuxer { let (video_tx, video_rx) = sync_channel::>(buffer_size); - let (ready_tx, ready_rx) = sync_channel::>(1); + let (ready_tx, ready_rx) = + sync_channel::>>>(1); let encoder_config = SegmentedVideoEncoderConfig { segment_duration: self.segment_duration, @@ -256,21 +257,33 @@ impl WindowsFragmentedM4SMuxer 
{ output_size: self.output_size, }; - let mut encoder = - SegmentedVideoEncoder::init(self.base_path.clone(), self.video_config, encoder_config)?; - if let Some(callback) = &self.disk_space_callback { - encoder.set_disk_space_callback(callback.clone()); - } - let encoder = Arc::new(Mutex::new(encoder)); - let encoder_clone = encoder.clone(); - + let base_path = self.base_path.clone(); let video_config = self.video_config; + let disk_space_callback = self.disk_space_callback.clone(); + let encoder_handle = std::thread::Builder::new() .name("win-m4s-segment-encoder".to_string()) .spawn(move || { cap_mediafoundation_utils::thread_init(); - if ready_tx.send(Ok(())).is_err() { + let mut encoder_inst = + match SegmentedVideoEncoder::init(base_path, video_config, encoder_config) { + Ok(e) => e, + Err(e) => { + let err = anyhow!(e); + let _ = ready_tx.send(Err(anyhow!(err.to_string()))); + return Err(err); + } + }; + + if let Some(callback) = disk_space_callback { + encoder_inst.set_disk_space_callback(callback); + } + + let encoder = Arc::new(Mutex::new(encoder_inst)); + let encoder_clone = encoder.clone(); + + if ready_tx.send(Ok(encoder)).is_err() { return Err(anyhow!("Failed to send ready signal - receiver dropped")); } @@ -467,10 +480,11 @@ impl WindowsFragmentedM4SMuxer { ); } + cap_mediafoundation_utils::thread_uninit(); Ok(()) })?; - ready_rx + let encoder = ready_rx .recv() .map_err(|_| anyhow!("Windows M4S encoder thread ended unexpectedly"))??; @@ -681,7 +695,8 @@ impl WindowsFragmentedM4SCameraMuxer { let (video_tx, video_rx) = sync_channel::>(buffer_size); - let (ready_tx, ready_rx) = sync_channel::>(1); + let (ready_tx, ready_rx) = + sync_channel::>>>(1); let encoder_config = SegmentedVideoEncoderConfig { segment_duration: self.segment_duration, @@ -690,21 +705,36 @@ impl WindowsFragmentedM4SCameraMuxer { output_size: self.output_size, }; - let mut encoder = - SegmentedVideoEncoder::init(self.base_path.clone(), self.video_config, encoder_config)?; - if let Some(callback) = &self.disk_space_callback { - encoder.set_disk_space_callback(callback.clone()); - } - let encoder = Arc::new(Mutex::new(encoder)); - let encoder_clone = encoder.clone(); - + let base_path = self.base_path.clone(); let video_config = self.video_config; + let disk_space_callback = self.disk_space_callback.clone(); + let encoder_handle = std::thread::Builder::new() .name("win-m4s-camera-segment-encoder".to_string()) .spawn(move || { cap_mediafoundation_utils::thread_init(); - if ready_tx.send(Ok(())).is_err() { + let mut encoder_inst = match SegmentedVideoEncoder::init( + base_path, + video_config, + encoder_config, + ) { + Ok(e) => e, + Err(e) => { + let err = anyhow!(e); + let _ = ready_tx.send(Err(anyhow!(err.to_string()))); + return Err(err); + } + }; + + if let Some(callback) = disk_space_callback { + encoder_inst.set_disk_space_callback(callback); + } + + let encoder = Arc::new(Mutex::new(encoder_inst)); + let encoder_clone = encoder.clone(); + + if ready_tx.send(Ok(encoder)).is_err() { return Err(anyhow!( "Failed to send ready signal - camera receiver dropped" )); @@ -901,10 +931,11 @@ impl WindowsFragmentedM4SCameraMuxer { ); } + cap_mediafoundation_utils::thread_uninit(); Ok(()) })?; - ready_rx + let encoder = ready_rx .recv() .map_err(|_| anyhow!("Windows M4S camera encoder thread ended unexpectedly"))??; diff --git a/crates/recording/src/output_pipeline/win_segmented.rs b/crates/recording/src/output_pipeline/win_segmented.rs index 69b36ef5fd..18bdb5ed0f 100644 --- 
a/crates/recording/src/output_pipeline/win_segmented.rs +++ b/crates/recording/src/output_pipeline/win_segmented.rs @@ -513,7 +513,7 @@ impl WindowsSegmentedMuxer { } }; - match encoder { + let res = match encoder { either::Left((mut encoder, mut muxer)) => { trace!("Running native encoder for segment"); let mut first_timestamp: Option = None; @@ -541,7 +541,7 @@ impl WindowsSegmentedMuxer { Err(e) => { error!("Failed to lock output mutex: {e}"); return Err(windows::core::Error::new( - windows::core::HRESULT(0x80004005u32 as i32), + windows::Win32::Foundation::E_FAIL, format!("Mutex poisoned: {e}"), )); } @@ -595,7 +595,10 @@ impl WindowsSegmentedMuxer { Ok(()) } - } + }; + + cap_mediafoundation_utils::thread_uninit(); + res })?; ready_rx diff --git a/crates/recording/src/output_pipeline/win_segmented_camera.rs b/crates/recording/src/output_pipeline/win_segmented_camera.rs index b9f2e52bb3..6008fe2134 100644 --- a/crates/recording/src/output_pipeline/win_segmented_camera.rs +++ b/crates/recording/src/output_pipeline/win_segmented_camera.rs @@ -499,7 +499,7 @@ impl WindowsSegmentedCameraMuxer { } }; - match encoder { + let res = match encoder { either::Left((mut encoder, mut muxer)) => { info!( "Camera segment encoder started (hardware): {:?} {}x{} -> NV12 {}x{} @ {}fps", @@ -535,7 +535,7 @@ impl WindowsSegmentedCameraMuxer { |output_sample| { let mut output = output_clone.lock().map_err(|e| { windows::core::Error::new( - windows::core::HRESULT(-1), + windows::Win32::Foundation::E_FAIL, format!("Mutex poisoned: {e}"), ) })?; @@ -543,7 +543,7 @@ impl WindowsSegmentedCameraMuxer { .write_sample(&output_sample, &mut output) .map_err(|e| { windows::core::Error::new( - windows::core::HRESULT(-1), + windows::Win32::Foundation::E_FAIL, format!("WriteSample: {e}"), ) }) @@ -617,7 +617,10 @@ impl WindowsSegmentedCameraMuxer { Ok(()) } - } + }; + + cap_mediafoundation_utils::thread_uninit(); + res })?; ready_rx diff --git a/crates/recording/src/sources/screen_capture/windows.rs b/crates/recording/src/sources/screen_capture/windows.rs index 5bbd7138ac..a41fa1e9e1 100644 --- a/crates/recording/src/sources/screen_capture/windows.rs +++ b/crates/recording/src/sources/screen_capture/windows.rs @@ -310,6 +310,7 @@ impl output_pipeline::VideoSource for VideoSource { drop(drop_guard); + cap_mediafoundation_utils::thread_uninit(); Ok(()) }); diff --git a/crates/rendering/Cargo.toml b/crates/rendering/Cargo.toml index 8727008bcb..bea9ce359d 100644 --- a/crates/rendering/Cargo.toml +++ b/crates/rendering/Cargo.toml @@ -45,7 +45,7 @@ wgpu-core.workspace = true foreign-types = "0.5" [target.'cfg(target_os = "windows")'.dependencies] -wgpu-hal = { workspace = true, features = ["dx12"] } +wgpu-hal = { workspace = true } wgpu-core.workspace = true windows = { workspace = true, features = [ "Win32_Foundation", @@ -55,6 +55,8 @@ windows = { workspace = true, features = [ "Win32_Graphics_Dxgi", "Win32_Graphics_Dxgi_Common", "Win32_System_Threading", + "Win32_System_Com", + "Win32_System_LibraryLoader", ] } windows_0_58 = { package = "windows", version = "0.58", features = [ "Win32_Foundation", diff --git a/crates/rendering/src/decoder/ffmpeg.rs b/crates/rendering/src/decoder/ffmpeg.rs index c664f7a5bd..097bd677d8 100644 --- a/crates/rendering/src/decoder/ffmpeg.rs +++ b/crates/rendering/src/decoder/ffmpeg.rs @@ -20,6 +20,9 @@ use super::{ frame_converter::FrameConverter, pts_to_frame, }; +#[cfg(target_os = "windows")] +use windows::Win32::System::Com::{COINIT_MULTITHREADED, CoInitializeEx, CoUninitialize}; + 
#[derive(Clone)] struct ProcessedFrame { number: u32, @@ -199,6 +202,12 @@ impl FfmpegDecoder { let (continue_tx, continue_rx) = mpsc::channel::>(); std::thread::spawn(move || { + #[cfg(target_os = "windows")] + let com_initialized = unsafe { + let hr = CoInitializeEx(None, COINIT_MULTITHREADED); + hr.is_ok() + }; + let hw_device_type = if use_hw_acceleration { #[cfg(target_os = "windows")] { @@ -813,6 +822,13 @@ impl FfmpegDecoder { } } } + + #[cfg(target_os = "windows")] + if com_initialized { + unsafe { + CoUninitialize(); + } + } }); continue_rx.recv().map_err(|e| e.to_string())?.map(|_| ()) diff --git a/crates/rendering/src/layers/camera.rs b/crates/rendering/src/layers/camera.rs index 700bf52c41..6897c919e0 100644 --- a/crates/rendering/src/layers/camera.rs +++ b/crates/rendering/src/layers/camera.rs @@ -21,11 +21,13 @@ pub struct CameraLayer { } impl CameraLayer { - pub fn new(device: &wgpu::Device) -> Self { + #[allow(dead_code)] + pub fn new(device: &wgpu::Device, is_software_adapter: bool) -> Self { Self::new_with_all_shared_pipelines( device, Arc::new(YuvConverterPipelines::new(device)), Arc::new(CompositeVideoFramePipeline::new(device)), + is_software_adapter, ) } @@ -33,6 +35,7 @@ impl CameraLayer { device: &wgpu::Device, yuv_pipelines: Arc, composite_pipeline: Arc, + is_software_adapter: bool, ) -> Self { let frame_texture_0 = CompositeVideoFramePipeline::create_frame_texture(device, 1920, 1080); let frame_texture_1 = CompositeVideoFramePipeline::create_frame_texture(device, 1920, 1080); @@ -52,7 +55,11 @@ impl CameraLayer { let bind_group_1 = Some(composite_pipeline.bind_group(device, &uniforms_buffer, &frame_texture_view_1)); - let yuv_converter = YuvToRgbaConverter::new_with_shared_pipelines(device, yuv_pipelines); + let yuv_converter = YuvToRgbaConverter::new_with_shared_pipelines( + device, + yuv_pipelines, + is_software_adapter, + ); Self { frame_textures: [frame_texture_0, frame_texture_1], diff --git a/crates/rendering/src/layers/display.rs b/crates/rendering/src/layers/display.rs index f2f3df3a40..c77abedec7 100644 --- a/crates/rendering/src/layers/display.rs +++ b/crates/rendering/src/layers/display.rs @@ -5,6 +5,7 @@ use crate::{ composite_frame::{CompositeVideoFramePipeline, CompositeVideoFrameUniforms}, yuv_converter::YuvToRgbaConverter, }; +use std::sync::Arc; struct PendingTextureCopy { width: u32, @@ -27,11 +28,30 @@ pub struct DisplayLayer { impl DisplayLayer { #[allow(dead_code)] - pub fn new(device: &wgpu::Device) -> Self { - Self::new_with_options(device, false) + pub fn new(device: &wgpu::Device, is_software_adapter: bool) -> Self { + Self::new_with_options(device, false, is_software_adapter) } - pub fn new_with_options(device: &wgpu::Device, prefer_cpu_conversion: bool) -> Self { + pub fn new_with_options( + device: &wgpu::Device, + prefer_cpu_conversion: bool, + is_software_adapter: bool, + ) -> Self { + let pipelines = Arc::new(crate::yuv_converter::YuvConverterPipelines::new(device)); + Self::new_with_shared_pipelines( + device, + pipelines, + prefer_cpu_conversion, + is_software_adapter, + ) + } + + pub fn new_with_shared_pipelines( + device: &wgpu::Device, + yuv_pipelines: Arc, + prefer_cpu_conversion: bool, + is_software_adapter: bool, + ) -> Self { let frame_texture_0 = CompositeVideoFramePipeline::create_frame_texture(device, 1920, 1080); let frame_texture_1 = CompositeVideoFramePipeline::create_frame_texture(device, 1920, 1080); let frame_texture_view_0 = frame_texture_0.create_view(&Default::default()); @@ -44,7 +64,11 @@ impl DisplayLayer { 
let bind_group_1 = Some(pipeline.bind_group(device, &uniforms_buffer, &frame_texture_view_1)); - let yuv_converter = YuvToRgbaConverter::new(device); + let yuv_converter = YuvToRgbaConverter::new_with_shared_pipelines( + device, + yuv_pipelines, + is_software_adapter, + ); if prefer_cpu_conversion { tracing::info!("DisplayLayer initialized with CPU YUV conversion preference"); diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs index 9848f21859..3cafcb1947 100644 --- a/crates/rendering/src/lib.rs +++ b/crates/rendering/src/lib.rs @@ -19,6 +19,7 @@ use spring_mass_damper::SpringMassDamperSimulationConfig; use std::{collections::HashMap, sync::Arc}; use std::{path::PathBuf, time::Instant}; use tokio::sync::mpsc; +use yuv_converter::YuvConverterPipelines; mod composite_frame; mod coord; @@ -338,6 +339,7 @@ pub async fn render_video_to_channel( &constants.device, &constants.queue, constants.is_software_adapter, + constants.is_software_adapter, ); if let Some(first_segment) = render_segments.first() { @@ -1891,21 +1893,42 @@ pub struct RendererLayers { impl RendererLayers { pub fn new(device: &wgpu::Device, queue: &wgpu::Queue) -> Self { - Self::new_with_options(device, queue, false) + Self::new_with_options(device, queue, false, false) } pub fn new_with_options( device: &wgpu::Device, queue: &wgpu::Queue, prefer_cpu_conversion: bool, + is_software_adapter: bool, ) -> Self { + let yuv_pipelines = Arc::new(YuvConverterPipelines::new(device)); + let composite_pipeline = Arc::new( + crate::composite_frame::CompositeVideoFramePipeline::new(device), + ); + Self { background: BackgroundLayer::new(device), background_blur: BlurLayer::new(device), - display: DisplayLayer::new_with_options(device, prefer_cpu_conversion), + display: DisplayLayer::new_with_shared_pipelines( + device, + yuv_pipelines.clone(), + prefer_cpu_conversion, + is_software_adapter, + ), cursor: CursorLayer::new(device), - camera: CameraLayer::new(device), - camera_only: CameraLayer::new(device), + camera: CameraLayer::new_with_all_shared_pipelines( + device, + yuv_pipelines.clone(), + composite_pipeline.clone(), + is_software_adapter, + ), + camera_only: CameraLayer::new_with_all_shared_pipelines( + device, + yuv_pipelines, + composite_pipeline, + is_software_adapter, + ), mask: MaskLayer::new(device), text: TextLayer::new(device, queue), captions: CaptionsLayer::new(device, queue), diff --git a/crates/rendering/src/yuv_converter.rs b/crates/rendering/src/yuv_converter.rs index e93c29f30a..588a4387a6 100644 --- a/crates/rendering/src/yuv_converter.rs +++ b/crates/rendering/src/yuv_converter.rs @@ -86,7 +86,7 @@ const MAX_TEXTURE_WIDTH: u32 = 7680; const MAX_TEXTURE_HEIGHT: u32 = 4320; const INITIAL_TEXTURE_WIDTH: u32 = 1920; -const INITIAL_TEXTURE_HEIGHT: u32 = 1080; +const INITIAL_TEXTURE_HEIGHT: u32 = 1088; const TEXTURE_SIZE_PADDING: u32 = 64; @@ -420,17 +420,19 @@ pub struct YuvToRgbaConverter { d3d11_staging_height: u32, #[cfg(target_os = "windows")] zero_copy_failed: bool, + is_software_adapter: bool, } impl YuvToRgbaConverter { - pub fn new(device: &wgpu::Device) -> Self { + pub fn new(device: &wgpu::Device, is_software_adapter: bool) -> Self { let pipelines = Arc::new(YuvConverterPipelines::new(device)); - Self::new_with_shared_pipelines(device, pipelines) + Self::new_with_shared_pipelines(device, pipelines, is_software_adapter) } pub fn new_with_shared_pipelines( device: &wgpu::Device, pipelines: Arc, + is_software_adapter: bool, ) -> Self { let gpu_max_texture_size = 
device.limits().max_texture_dimension_2d;
@@ -476,6 +478,7 @@ impl YuvToRgbaConverter {
             d3d11_staging_height: 0,
             #[cfg(target_os = "windows")]
             zero_copy_failed: false,
+            is_software_adapter,
         }
     }
@@ -1206,6 +1209,7 @@ impl YuvToRgbaConverter {
         height: u32,
     ) -> Result<&wgpu::TextureView, YuvConversionError> {
         if !self.zero_copy_failed
+            && !self.is_software_adapter
             && let (Some(y_h), Some(uv_h)) = (y_handle, uv_handle)
         {
             match self.convert_nv12_from_d3d11_shared_handles(
diff --git a/extensions/raycast/extension-icon.png b/extensions/raycast/extension-icon.png
new file mode 100644
index 0000000000..f6fb22ca7a
Binary files /dev/null and b/extensions/raycast/extension-icon.png differ
diff --git a/extensions/raycast/package.json b/extensions/raycast/package.json
new file mode 100644
index 0000000000..de290136bd
--- /dev/null
+++ b/extensions/raycast/package.json
@@ -0,0 +1,57 @@
+{
+  "$schema": "https://www.raycast.com/schemas/extension.json",
+  "name": "cap-control",
+  "title": "Cap Control",
+  "description": "Control Cap screen recorder",
+  "icon": "extension-icon.png",
+  "author": "cap_user",
+  "categories": [
+    "Productivity",
+    "Applications"
+  ],
+  "license": "MIT",
+  "commands": [
+    {
+      "name": "start-recording",
+      "title": "Start Recording",
+      "description": "Start a new recording in Cap",
+      "mode": "no-view"
+    },
+    {
+      "name": "stop-recording",
+      "title": "Stop Recording",
+      "description": "Stop current recording in Cap",
+      "mode": "no-view"
+    },
+    {
+      "name": "pause-recording",
+      "title": "Pause Recording",
+      "description": "Pause current recording",
+      "mode": "no-view"
+    },
+    {
+      "name": "resume-recording",
+      "title": "Resume Recording",
+      "description": "Resume current recording",
+      "mode": "no-view"
+    }
+  ],
+  "dependencies": {
+    "@raycast/api": "^1.66.0"
+  },
+  "devDependencies": {
+    "@raycast/eslint-config": "^1.0.6",
+    "@types/node": "20.8.10",
+    "@types/react": "18.2.27",
+    "eslint": "^8.51.0",
+    "prettier": "^3.0.3",
+    "typescript": "^5.2.2"
+  },
+  "scripts": {
+    "build": "ray build -e dist",
+    "dev": "ray develop",
+    "fix-lint": "ray lint --fix",
+    "lint": "ray lint",
+    "publish": "npx @raycast/api@latest publish"
+  }
+}
diff --git a/extensions/raycast/src/pause-recording.ts b/extensions/raycast/src/pause-recording.ts
new file mode 100644
index 0000000000..3582e01a41
--- /dev/null
+++ b/extensions/raycast/src/pause-recording.ts
@@ -0,0 +1,10 @@
+import { open, showHUD } from "@raycast/api";
+
+export default async function Command() {
+  try {
+    await open("cap://pause");
+    await showHUD("Pausing Cap recording...");
+  } catch (_error) {
+    await showHUD("Failed to open Cap");
+  }
+}
diff --git a/extensions/raycast/src/resume-recording.ts b/extensions/raycast/src/resume-recording.ts
new file mode 100644
index 0000000000..f006f7ad12
--- /dev/null
+++ b/extensions/raycast/src/resume-recording.ts
@@ -0,0 +1,10 @@
+import { open, showHUD } from "@raycast/api";
+
+export default async function Command() {
+  try {
+    await open("cap://resume");
+    await showHUD("Resuming Cap recording...");
+  } catch (_error) {
+    await showHUD("Failed to open Cap");
+  }
+}
diff --git a/extensions/raycast/src/start-recording.ts b/extensions/raycast/src/start-recording.ts
new file mode 100644
index 0000000000..e5ef63267b
--- /dev/null
+++ b/extensions/raycast/src/start-recording.ts
@@ -0,0 +1,10 @@
+import { open, showHUD } from "@raycast/api";
+
+export default async function Command() {
+  try {
+    await open("cap://record");
+    await showHUD("Starting Cap recording...");
+  } catch (_error) {
+    await showHUD("Failed to open Cap");
+  }
+}
diff --git a/extensions/raycast/src/stop-recording.ts b/extensions/raycast/src/stop-recording.ts
new file mode 100644
index 0000000000..1fdbf6ea21
--- /dev/null
+++ b/extensions/raycast/src/stop-recording.ts
@@ -0,0 +1,10 @@
+import { open, showHUD } from "@raycast/api";
+
+export default async function Command() {
+  try {
+    await open("cap://stop");
+    await showHUD("Stopping Cap recording...");
+  } catch (_error) {
+    await showHUD("Failed to open Cap");
+  }
+}
diff --git a/extensions/raycast/tsconfig.json b/extensions/raycast/tsconfig.json
new file mode 100644
index 0000000000..dd6b135e3a
--- /dev/null
+++ b/extensions/raycast/tsconfig.json
@@ -0,0 +1,13 @@
+{
+  "compilerOptions": {
+    "target": "es2022",
+    "lib": ["ES2022", "DOM", "DOM.Iterable"],
+    "module": "commonjs",
+    "strict": true,
+    "esModuleInterop": true,
+    "skipLibCheck": true,
+    "forceConsistentCasingInFileNames": true,
+    "jsx": "react-jsx"
+  },
+  "include": ["src/**/*"]
+}
diff --git a/metadata.json b/metadata.json
new file mode 100644
index 0000000000..464b5f2b77
Binary files /dev/null and b/metadata.json differ
diff --git a/scripts/setup.js b/scripts/setup.js
index be1b5ed6ea..bfc3ad5f5e 100644
--- a/scripts/setup.js
+++ b/scripts/setup.js
@@ -90,7 +90,7 @@ async function main() {
     }
     console.log("Copied ffmpeg dylibs to target/debug");
   } else if (process.platform === "win32") {
-    const FFMPEG_VERSION = "7.1";
+    const FFMPEG_VERSION = "6.1";
     const FFMPEG_ZIP_NAME = `ffmpeg-${FFMPEG_VERSION}-full_build-shared`;
     const FFMPEG_ZIP_URL = `https://github.com/GyanD/codexffmpeg/releases/download/${FFMPEG_VERSION}/${FFMPEG_ZIP_NAME}.zip`;
@@ -156,10 +156,11 @@ async function main() {
       { shell: "powershell.exe" },
     );
-    const libclangPath = path.join(
-      vcInstallDir.trim(),
-      "VC/Tools/LLVM/x64/bin/libclang.dll",
-    );
+    const libclangPath = (await fileExists(
+      "C:/Program Files/LLVM/bin/libclang.dll",
+    ))
+      ? "C:/Program Files/LLVM/bin/libclang.dll"
+      : path.join(vcInstallDir.trim(), "VC/Tools/LLVM/x64/bin/libclang.dll");
     cargoConfigContents += `LIBCLANG_PATH = "${libclangPath.replaceAll(
       "\\",