From 5ac9a8e9e0babb5958a48610bb2e078e88100acb Mon Sep 17 00:00:00 2001 From: Matt Toohey Date: Wed, 13 May 2026 17:09:25 +1000 Subject: [PATCH 1/2] feat(staged): add browser-accessible web mode with transport abstraction Restore the full Axum HTTPS web server (previously stubbed) and implement the frontend transport layer for running Staged in a browser. This enables phone and desktop browser access to a running Staged instance. Key changes: - Unstub web_server::start() with TLS listener, static file serving, and auth-protected API routes - Add HTTP transport in invokeCommand() that POSTs to /api/invoke/{command} with automatic 401 redirect to login - Add WebSocket singleton for server-sent events in web mode - Add WebLogin.svelte token entry screen with /api/auth session cookie flow - Implement localStorage backend for persistentStore in web mode - Expose web access token via Tauri command and settings UI with copy button - Add `just dev-web` recipe requiring HTTPS cert/key env vars - Add writeClipboardText/readClipboardText web fallbacks in transport layer Co-Authored-By: Claude Opus 4.6 (1M context) Signed-off-by: Matt Toohey --- apps/staged/justfile | 29 +++ apps/staged/package.json | 2 +- apps/staged/src-tauri/Cargo.lock | 4 +- apps/staged/src-tauri/src/lib.rs | 19 +- apps/staged/src-tauri/src/web_server.rs | 74 ++++++-- apps/staged/src/App.svelte | 31 +++- apps/staged/src/lib/commands.ts | 9 + .../src/lib/features/layout/WebLogin.svelte | 137 ++++++++++++++ .../lib/features/settings/SettingsPage.svelte | 80 ++++++++- apps/staged/src/lib/shared/persistentStore.ts | 43 ++++- apps/staged/src/lib/transport.ts | 167 ++++++++++++++++-- apps/staged/vite.config.ts | 30 ++++ 12 files changed, 588 insertions(+), 37 deletions(-) create mode 100644 apps/staged/src/lib/features/layout/WebLogin.svelte diff --git a/apps/staged/justfile b/apps/staged/justfile index a7009b6b4..6e833633c 100644 --- a/apps/staged/justfile +++ b/apps/staged/justfile @@ -53,6 +53,35 @@ dev 
repo="": {{ if repo != "" { "export STAGED_REPO=" + repo } else { "" } }} pnpm exec tauri dev --config "$TAURI_CONFIG" +# Run with the HTTPS web server enabled for phone/browser access. +# Requires PEM cert/key files and a hostname covered by the certificate. +dev-web repo="": + #!/usr/bin/env bash + set -euo pipefail + + [[ -d node_modules ]] || pnpm install + + if [[ -z "${STAGED_WEB_CERT_PATH:-}" || -z "${STAGED_WEB_KEY_PATH:-}" || -z "${STAGED_WEB_HOST:-}" ]]; then + printf '%s\n' \ + 'Error: `just dev-web` serves browser access over HTTPS.' \ + 'Provide PEM certificate/key files and a hostname covered by the certificate:' \ + '' \ + ' STAGED_WEB_CERT_PATH=/path/to/cert.pem \' \ + ' STAGED_WEB_KEY_PATH=/path/to/key.pem \' \ + ' STAGED_WEB_HOST=hostname.example.com \' \ + ' just dev-web' >&2 + exit 1 + fi + + VITE_PORT=$(python3 -c "import hashlib,os; h=int(hashlib.sha256(os.getcwd().encode()).hexdigest(),16); print(10000 + h % 55000)") + export VITE_PORT + export STAGED_WEB_SERVER=1 + TAURI_CONFIG="{\"build\":{\"devUrl\":\"https://${STAGED_WEB_HOST}:${VITE_PORT}\",\"beforeDevCommand\":\"exec ./node_modules/.bin/vite --port ${VITE_PORT} --strictPort --host 0.0.0.0\"}}" + + echo "Starting on https://${STAGED_WEB_HOST}:${VITE_PORT} (HTTPS web server on :5175)" + {{ if repo != "" { "export STAGED_REPO=" + repo } else { "" } }} + pnpm exec tauri dev --config "$TAURI_CONFIG" + # Build the app for production build: pnpm run tauri:build diff --git a/apps/staged/package.json b/apps/staged/package.json index 50588dec0..2916ad0db 100644 --- a/apps/staged/package.json +++ b/apps/staged/package.json @@ -5,7 +5,7 @@ "type": "module", "scripts": { "dev": "vite", - + "dev:web": "vite --host 0.0.0.0", "build": "vite build", "preview": "vite preview", "check": "svelte-check --tsconfig ./tsconfig.app.json --fail-on-warnings && tsc -p tsconfig.node.json", diff --git a/apps/staged/src-tauri/Cargo.lock b/apps/staged/src-tauri/Cargo.lock index 2f914d9b4..8e038426c 100644 --- 
a/apps/staged/src-tauri/Cargo.lock +++ b/apps/staged/src-tauri/Cargo.lock @@ -1121,9 +1121,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.11.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4ae5f15dda3c708c0ade84bfee31ccab44a3da4f88015ed22f63732abe300c8" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" [[package]] name = "deranged" diff --git a/apps/staged/src-tauri/src/lib.rs b/apps/staged/src-tauri/src/lib.rs index 5ee55853b..9a61c9c1b 100644 --- a/apps/staged/src-tauri/src/lib.rs +++ b/apps/staged/src-tauri/src/lib.rs @@ -51,6 +51,10 @@ struct DbState { needs_reset: Mutex>, } +/// Holds the bearer token for web server authentication so it can be +/// retrieved by the frontend (Tauri command) and shown to the user. +struct WebAccessToken(String); + #[derive(Default)] struct ShutdownState { quit_in_progress: AtomicBool, @@ -260,6 +264,12 @@ fn stop_actions_for_app_shutdown(app_handle: &tauri::AppHandle) { // Store status commands // ============================================================================= +/// Returns the bearer token used to authenticate web browser clients. +#[tauri::command] +fn get_web_access_token(token: tauri::State<'_, WebAccessToken>) -> String { + token.0.clone() +} + /// Returns null if the store is ready, or version info if a reset is needed. #[tauri::command] fn get_store_status(db_state: tauri::State<'_, DbState>) -> Option { @@ -1752,14 +1762,16 @@ pub fn run() { let (event_tx, _) = tokio::sync::broadcast::channel::(256); app.manage(event_tx.clone()); - // Web server startup is stubbed out in this build. - // TODO(web): restore web server startup from the `mobile-web` branch. + // Start the Axum web server only when opted-in via environment variable. + // This avoids exposing an HTTP server on all interfaces for users who + // don't need browser-based access. 
let web_server_enabled = std::env::var("STAGED_WEB_SERVER") .map(|v| v == "1" || v.eq_ignore_ascii_case("true")) .unwrap_or(false); if web_server_enabled { let auth_token = web_server::generate_token(); + app.manage(WebAccessToken(auth_token.clone())); web_server::start(web_server::WebAppState { app_handle: app.handle().clone(), event_tx, @@ -1768,6 +1780,8 @@ pub fn run() { std::collections::HashSet::new(), )), }); + } else { + app.manage(WebAccessToken(String::new())); } if cfg!(debug_assertions) { @@ -1798,6 +1812,7 @@ pub fn run() { } }) .invoke_handler(tauri::generate_handler![ + get_web_access_token, get_store_status, confirm_reset_store, list_projects, diff --git a/apps/staged/src-tauri/src/web_server.rs b/apps/staged/src-tauri/src/web_server.rs index cce61cdae..8f088bd61 100644 --- a/apps/staged/src-tauri/src/web_server.rs +++ b/apps/staged/src-tauri/src/web_server.rs @@ -11,10 +11,6 @@ //! All `/api/*` routes (except `/api/auth`) require authentication via either //! an `Authorization: Bearer ` header or a valid `staged_session` cookie. -// The full implementation is preserved here but start() is currently stubbed out, -// so most items appear unused to the compiler. -#![allow(dead_code, unused_imports)] - use std::collections::HashSet; use std::io; use std::net::SocketAddr; @@ -188,14 +184,68 @@ impl Listener for TlsListener { /// Start the Axum web server in a background tokio task. /// -/// Stubbed — logs a warning and returns. The full implementation (TLS listener, -/// Axum router with static file serving) is intentionally disabled in this build. -/// All route handlers, auth middleware, and the `dispatch()` match block are kept -/// compiling so they stay in sync with the rest of the codebase. -/// -/// TODO(web): restore full web server startup from the `mobile-web` branch. 
-pub fn start(_state: WebAppState) { - log::warn!("Web server requested but this build has the web server stubbed out"); +/// This should be called from the Tauri `setup` hook after all managed state +/// has been registered. +pub fn start(state: WebAppState) { + let token = state.auth_token.clone(); + tauri::async_runtime::spawn(async move { + let dist_dir = std::env::current_exe() + .ok() + .and_then(|p| p.parent().map(|p| p.to_path_buf())) + // In dev, the exe is in src-tauri/target/debug; dist is at ../../dist relative to src-tauri + .map(|p| { + // Try multiple candidate paths for the built frontend + let candidates = vec![ + p.join("../dist"), // production bundle + p.join("../../../../dist"), // dev (target/debug -> src-tauri -> apps/staged -> dist) + PathBuf::from("../dist"), // relative to cwd + ]; + candidates + .into_iter() + .find(|c| c.exists()) + .unwrap_or_else(|| PathBuf::from("../dist")) + }) + .unwrap_or_else(|| PathBuf::from("../dist")); + + // Protected API routes require auth (Bearer token or session cookie) + let api_routes = Router::new() + .route("/api/invoke/{command}", post(invoke_command)) + .route("/api/events", get(ws_events)) + .route_layer(middleware::from_fn_with_state(state.clone(), require_auth)); + + // Auth endpoint is public (it's where you submit the token) + let auth_route = Router::new().route("/api/auth", post(authenticate)); + + let app = api_routes + .merge(auth_route) + .fallback_service(ServeDir::new(&dist_dir).append_index_html_on_directories(true)) + .layer(CorsLayer::permissive()) + .with_state(state); + + let addr = "0.0.0.0:5175"; + let tls_acceptor = match load_tls_acceptor() { + Ok(acceptor) => acceptor, + Err(e) => { + log::error!("[web_server] {e}"); + return; + } + }; + log::info!( + "[web_server] starting HTTPS on {addr}, serving static files from {}", + dist_dir.display() + ); + log::info!("[web_server] web access token: {token}"); + let listener = match tokio::net::TcpListener::bind(addr).await { + Ok(l) => 
l, + Err(e) => { + log::error!("[web_server] failed to bind {addr}: {e}"); + return; + } + }; + if let Err(e) = axum::serve(TlsListener::new(listener, tls_acceptor), app).await { + log::error!("[web_server] server error: {e}"); + } + }); } // ============================================================================= diff --git a/apps/staged/src/App.svelte b/apps/staged/src/App.svelte index cf9eca382..a57db2568 100644 --- a/apps/staged/src/App.svelte +++ b/apps/staged/src/App.svelte @@ -7,6 +7,7 @@ -{#if preferences.loaded} +{#if showLogin} + +{:else if preferences.loaded} {#if storeIncompat && storeIncompat.kind === 'needs_reset'}
diff --git a/apps/staged/src/lib/commands.ts b/apps/staged/src/lib/commands.ts index bcabd7ff2..e544eeb70 100644 --- a/apps/staged/src/lib/commands.ts +++ b/apps/staged/src/lib/commands.ts @@ -41,6 +41,15 @@ export interface WorktreeChangesPreview { conflictedPaths: string[]; } +// ============================================================================= +// Web access +// ============================================================================= + +/** Returns the bearer token for web server authentication (Tauri-only). */ +export function getWebAccessToken(): Promise { + return invokeCommand('get_web_access_token'); +} + // ============================================================================= // Store status // ============================================================================= diff --git a/apps/staged/src/lib/features/layout/WebLogin.svelte b/apps/staged/src/lib/features/layout/WebLogin.svelte new file mode 100644 index 000000000..ea10a83e0 --- /dev/null +++ b/apps/staged/src/lib/features/layout/WebLogin.svelte @@ -0,0 +1,137 @@ + + + + + + diff --git a/apps/staged/src/lib/features/settings/SettingsPage.svelte b/apps/staged/src/lib/features/settings/SettingsPage.svelte index 321a7f850..4262db755 100644 --- a/apps/staged/src/lib/features/settings/SettingsPage.svelte +++ b/apps/staged/src/lib/features/settings/SettingsPage.svelte @@ -6,9 +6,12 @@ import DoctorSettingsPanel from './DoctorSettingsPanel.svelte'; import GeneralSettingsPanel from './GeneralSettingsPanel.svelte'; import KeyboardSettingsPanel from './KeyboardSettingsPanel.svelte'; - import { isTauri } from '../../transport'; + import { isTauri, writeClipboardText } from '../../transport'; + import * as commands from '../../commands'; let appVersion = $state(__APP_VERSION__); + let webToken = $state(null); + let tokenCopied = $state(false); onMount(async () => { if (!isTauri) return; @@ -19,8 +22,21 @@ } catch (error) { console.warn('[Settings] Could not load runtime app version', 
error); } + + try { + webToken = await commands.getWebAccessToken(); + } catch { + // web server may not be running + } }); + async function copyToken() { + if (!webToken) return; + await writeClipboardText(webToken); + tokenCopied = true; + setTimeout(() => (tokenCopied = false), 2000); + } + function handleBack() { closeSettings(); } @@ -94,6 +110,18 @@
+ + {#if webToken} +
+ Web Access Token +
+ {webToken.slice(0, 8)}... + +
+
+ {/if}
@@ -303,5 +331,55 @@ .nav-meta { display: none; } + + .web-token-section { + display: none; + } + } + + .web-token-section { + margin-top: auto; + padding: 12px; + border-top: 1px solid var(--border-subtle); + } + + .web-token-label { + font-size: var(--size-xs); + color: var(--text-muted); + display: block; + margin-bottom: 6px; + } + + .web-token-row { + display: flex; + align-items: center; + gap: 8px; + } + + .web-token-value { + font-size: var(--size-xs); + color: var(--text-faint); + background: var(--bg-deepest); + padding: 2px 6px; + border-radius: 3px; + flex: 1; + overflow: hidden; + text-overflow: ellipsis; + } + + .web-token-copy { + background: none; + border: 1px solid var(--border-muted); + border-radius: 4px; + color: var(--text-muted); + font-size: var(--size-xs); + padding: 2px 8px; + cursor: pointer; + white-space: nowrap; + } + + .web-token-copy:hover { + color: var(--text-primary); + border-color: var(--border-emphasis); } diff --git a/apps/staged/src/lib/shared/persistentStore.ts b/apps/staged/src/lib/shared/persistentStore.ts index b0bf6a897..b8eb96927 100644 --- a/apps/staged/src/lib/shared/persistentStore.ts +++ b/apps/staged/src/lib/shared/persistentStore.ts @@ -22,15 +22,20 @@ interface TauriStoreBackend { } // --------------------------------------------------------------------------- -// localStorage backend (web mode) — stubbed out -// TODO(web): restore localStorage backend from the `mobile-web` branch +// localStorage backend (web mode) // --------------------------------------------------------------------------- +const LOCAL_STORAGE_PREFIX = 'staged:pref:'; + +interface LocalStorageBackend { + kind: 'localStorage'; +} + // --------------------------------------------------------------------------- // Singleton // --------------------------------------------------------------------------- -type StoreBackend = TauriStoreBackend | null; +type StoreBackend = TauriStoreBackend | LocalStorageBackend | null; let backend: StoreBackend = 
null; @@ -50,8 +55,9 @@ export async function initPersistentStore(): Promise { overrideDefaults: true, }); backend = { kind: 'tauri', store }; + } else { + backend = { kind: 'localStorage' }; } - // TODO(web): restore localStorage backend initialization for web mode } /** @@ -64,7 +70,18 @@ export async function getStoreValue(key: string): Promise { return undefined; } - return backend.store.get(key); + if (backend.kind === 'tauri') { + return backend.store.get(key); + } + + // localStorage backend + const raw = localStorage.getItem(LOCAL_STORAGE_PREFIX + key); + if (raw === null) return undefined; + try { + return JSON.parse(raw) as T; + } catch { + return undefined; + } } /** @@ -77,7 +94,13 @@ export async function setStoreValue(key: string, value: T): Promise { return; } - await backend.store.set(key, value); + if (backend.kind === 'tauri') { + await backend.store.set(key, value); + return; + } + + // localStorage backend + localStorage.setItem(LOCAL_STORAGE_PREFIX + key, JSON.stringify(value)); } /** @@ -89,5 +112,11 @@ export async function deleteStoreValue(key: string): Promise { return; } - await backend.store.delete(key); + if (backend.kind === 'tauri') { + await backend.store.delete(key); + return; + } + + // localStorage backend + localStorage.removeItem(LOCAL_STORAGE_PREFIX + key); } diff --git a/apps/staged/src/lib/transport.ts b/apps/staged/src/lib/transport.ts index 2661521c6..9f0e187d2 100644 --- a/apps/staged/src/lib/transport.ts +++ b/apps/staged/src/lib/transport.ts @@ -6,9 +6,6 @@ * - Event listening (Tauri events vs WebSocket) * - Window management (Tauri window vs no-op) * - Clipboard (Tauri plugin vs navigator.clipboard) - * - * NOTE: Web-mode implementations are intentionally stubbed out in this build. - * TODO(web): restore web transport paths from the `mobile-web` branch. 
*/ // --------------------------------------------------------------------------- @@ -23,7 +20,7 @@ export const isTauri: boolean = typeof window !== 'undefined' && '__TAURI__' in /** * Invoke a backend command. In Tauri mode this calls `invoke()` from the - * Tauri API; in web mode it is stubbed out. + * Tauri API; in web mode it POSTs to `/api/invoke/{command}`. */ export async function invokeCommand( command: string, @@ -34,8 +31,62 @@ export async function invokeCommand( return invoke(command, args); } - // TODO(web): restore HTTP transport from the `mobile-web` branch - throw new Error(`[transport] Web mode is not available in this build (command: ${command})`); + const response = await fetch(`/api/invoke/${command}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(args ?? {}), + }); + + if (response.status === 401) { + redirectToLogin(); + throw new Error('Authentication required'); + } + + if (!response.ok) { + // The Axum server returns `{ "error": "..." }` JSON for BAD_REQUEST responses. + // Parse the JSON and extract the error field for a clean error message. + const text = await response.text(); + try { + const body = JSON.parse(text); + if (body?.error) { + throw new Error(body.error); + } + } catch (e) { + if (e instanceof Error && e.message !== text) throw e; + } + throw new Error(text); + } + + return response.json(); +} + +// --------------------------------------------------------------------------- +// Web authentication +// --------------------------------------------------------------------------- + +let loginRedirectPending = false; + +function redirectToLogin(): void { + if (loginRedirectPending) return; + loginRedirectPending = true; + // Use a small delay to batch multiple 401s that fire simultaneously + setTimeout(() => { + window.location.hash = '#/login'; + loginRedirectPending = false; + }, 50); +} + +/** + * Submit a bearer token to the web server's auth endpoint. 
+ * On success the server sets a session cookie and subsequent requests are authenticated. + */ +export async function submitWebToken(token: string): Promise { + const response = await fetch('/api/auth', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ token }), + }); + return response.ok; } // --------------------------------------------------------------------------- @@ -46,7 +97,7 @@ export type UnlistenFn = () => void; /** * Listen to a backend event. In Tauri mode this delegates to the Tauri event - * API; in web mode it is stubbed out. + * API; in web mode it connects to a shared WebSocket and filters by event name. */ export async function listenToEvent( event: string, @@ -57,9 +108,100 @@ export async function listenToEvent( return listen(event, (e) => callback(e.payload)); } - // TODO(web): restore WebSocket event transport from the `mobile-web` branch - console.warn(`[transport] Web mode event listening stubbed out (event: ${event})`); - return () => {}; + return webSocketListen(event, callback); +} + +// --------------------------------------------------------------------------- +// WebSocket singleton for web-mode events +// --------------------------------------------------------------------------- + +interface WebSocketListener { + event: string; + callback: (payload: unknown) => void; +} + +let ws: WebSocket | null = null; +let wsListeners: WebSocketListener[] = []; +let wsReconnectTimer: ReturnType | null = null; +let wsConnecting = false; + +function getWsUrl(): string { + const protocol = location.protocol === 'https:' ? 
'wss:' : 'ws:'; + return `${protocol}//${location.host}/api/events`; +} + +function ensureWebSocket(): void { + if (ws && (ws.readyState === WebSocket.OPEN || ws.readyState === WebSocket.CONNECTING)) { + return; + } + if (wsConnecting) return; + + wsConnecting = true; + ws = new WebSocket(getWsUrl()); + + ws.onopen = () => { + wsConnecting = false; + if (wsReconnectTimer) { + clearTimeout(wsReconnectTimer); + wsReconnectTimer = null; + } + }; + + ws.onmessage = (messageEvent) => { + try { + const data = JSON.parse(messageEvent.data) as { event: string; payload: unknown }; + for (const listener of wsListeners) { + if (listener.event === data.event) { + listener.callback(data.payload); + } + } + } catch { + console.warn('[transport] Failed to parse WebSocket message:', messageEvent.data); + } + }; + + ws.onclose = () => { + wsConnecting = false; + // Auto-reconnect if there are still listeners + if (wsListeners.length > 0 && !wsReconnectTimer) { + wsReconnectTimer = setTimeout(() => { + wsReconnectTimer = null; + if (wsListeners.length > 0) { + ensureWebSocket(); + } + }, 2000); + } + }; + + ws.onerror = () => { + wsConnecting = false; + // onclose will fire after onerror, which handles reconnect + }; +} + +function webSocketListen(event: string, callback: (payload: T) => void): UnlistenFn { + const listener: WebSocketListener = { + event, + callback: callback as (payload: unknown) => void, + }; + + wsListeners.push(listener); + ensureWebSocket(); + + return () => { + wsListeners = wsListeners.filter((l) => l !== listener); + // Tear down WebSocket when no listeners remain + if (wsListeners.length === 0) { + if (wsReconnectTimer) { + clearTimeout(wsReconnectTimer); + wsReconnectTimer = null; + } + if (ws) { + ws.close(); + ws = null; + } + } + }; } // --------------------------------------------------------------------------- @@ -76,6 +218,7 @@ interface WindowHandle { const noopWindow: WindowHandle = { show: async () => {}, close: async () => { + // In browser mode, 
just close the tab/window window.close(); }, startDragging: async () => {}, @@ -84,7 +227,7 @@ const noopWindow: WindowHandle = { /** * Get a handle to the current window. In Tauri mode this returns the real - * Tauri window; in web mode it returns a no-op implementation. + * Tauri window; in web mode it returns a no-op (or limited) implementation. */ export async function getWindow(): Promise { if (isTauri) { @@ -102,6 +245,7 @@ export async function getWindow(): Promise { export function getWindowSync(): WindowHandle { if (!isTauri) return noopWindow; + // Return a proxy that lazily imports the Tauri window API return { show: async () => { const { getCurrentWindow } = await import('@tauri-apps/api/window'); @@ -156,5 +300,6 @@ export async function onDragDropEvent( // eslint-disable-next-line @typescript-eslint/no-explicit-any return getCurrentWebview().onDragDropEvent(callback as any); } + // No-op in web mode — native file drag is a Tauri-only feature return () => {}; } diff --git a/apps/staged/vite.config.ts b/apps/staged/vite.config.ts index a63c689b5..8fab243e1 100644 --- a/apps/staged/vite.config.ts +++ b/apps/staged/vite.config.ts @@ -7,6 +7,24 @@ const port = parseInt(process.env.VITE_PORT || '5174', 10); const packageJson = JSON.parse( readFileSync(resolve(import.meta.dirname, 'package.json'), 'utf8') ) as { version: string }; +const webCertPath = process.env.STAGED_WEB_CERT_PATH; +const webKeyPath = process.env.STAGED_WEB_KEY_PATH; +const webHost = process.env.STAGED_WEB_HOST; + +function requireWebPath(name: string, value: string | undefined): string { + if (!value) { + throw new Error(`${name} must be set to enable HTTPS web mode`); + } + return resolve(value); +} + +const webHttps = + webCertPath || webKeyPath + ? 
{ + cert: readFileSync(requireWebPath('STAGED_WEB_CERT_PATH', webCertPath)), + key: readFileSync(requireWebPath('STAGED_WEB_KEY_PATH', webKeyPath)), + } + : undefined; // https://vite.dev/config/ export default defineConfig({ @@ -15,7 +33,19 @@ export default defineConfig({ }, plugins: [svelte()], server: { + // Network access (0.0.0.0) is enabled via `--host` in `just dev-web`. + // Default `dev` stays on localhost to avoid exposing the dev server. port, strictPort: true, + https: webHttps, + allowedHosts: webHost ? [webHost] : undefined, + proxy: { + '/api': { + target: `${webHttps ? 'https' : 'http'}://localhost:5175`, + changeOrigin: true, + secure: false, + ws: true, // WebSocket proxy for /api/events + }, + }, }, }); From 075b8bfc0618f77d8bbcc51aaf8e3240cb948773 Mon Sep 17 00:00:00 2001 From: Matt Toohey Date: Fri, 15 May 2026 17:30:38 +1000 Subject: [PATCH 2/2] feat(staged): add SWR cache with Page Lifecycle revalidation for web mode Squash of local branch reload-when-i-leave-web. Adds a stale-while-revalidate cache layer over Tauri commands so the frontend can serve persisted data immediately on cold start and revalidate in the background, and refresh automatically when a backgrounded web tab resumes from a Page Lifecycle freeze. Highlights: - Service worker and web app manifest for PWA support, with automated cache version generation in vite config - Core SWR cache infrastructure backed by IndexedDB, including LRU eviction for size management - Tier 1/2/3 command wiring with post-mutation invalidation, branch-scoped diff cache invalidation, and event-driven cache invalidation for real-time updates - Page Lifecycle listener that emits cache-stale events on resume, plus a cache-invalidation listener and component-level handlers that re-fetch on cache-stale (BranchCard, project home, sessions, etc.) 
- Cached commands return { data, revalidating } so callers can render stale data while a background refresh runs - Fixes: markAllStale yields stale data before revalidating, side-effectful refresh commands skip caching, in-flight reads no longer repopulate invalidated cache, session message cache bypass on completion, timeline cleanup promise chaining, awaited cache invalidation after mutations Co-Authored-By: Claude Opus 4.7 (1M context) Signed-off-by: Matt Toohey --- apps/staged/index.html | 4 + apps/staged/package.json | 2 + apps/staged/public/manifest.json | 15 + apps/staged/src/App.svelte | 8 + apps/staged/src/lib/cache.test.ts | 456 ++++++++++++++++++ apps/staged/src/lib/cache.ts | 289 +++++++++++ apps/staged/src/lib/commands.test.ts | 116 +++++ apps/staged/src/lib/commands.ts | 126 +++-- .../src/lib/features/agents/agent.svelte.ts | 9 +- .../lib/features/branches/BranchCard.svelte | 11 + .../branches/BranchCardPrButton.svelte | 4 +- .../features/diff/diffViewerState.svelte.ts | 39 +- .../lib/features/layout/navigation.svelte.ts | 2 +- .../lib/features/projects/ProjectHome.svelte | 227 +++++---- .../lib/features/projects/ProjectsList.svelte | 92 +++- .../lib/features/sessions/SessionModal.svelte | 18 +- .../settings/ActionsSettingsPanel.svelte | 4 +- .../lib/features/timeline/liveSessionHints.ts | 2 +- .../listeners/cacheInvalidationListener.ts | 54 +++ .../listeners/pageLifecycleListener.test.ts | 136 ++++++ .../lib/listeners/pageLifecycleListener.ts | 60 +++ .../lib/listeners/sessionStatusListener.ts | 9 +- apps/staged/src/main.ts | 10 + apps/staged/src/service-worker.js | 71 +++ apps/staged/vite.config.ts | 112 ++++- pnpm-lock.yaml | 31 +- 26 files changed, 1726 insertions(+), 181 deletions(-) create mode 100644 apps/staged/public/manifest.json create mode 100644 apps/staged/src/lib/cache.test.ts create mode 100644 apps/staged/src/lib/cache.ts create mode 100644 apps/staged/src/lib/listeners/cacheInvalidationListener.ts create mode 100644 
apps/staged/src/lib/listeners/pageLifecycleListener.test.ts create mode 100644 apps/staged/src/lib/listeners/pageLifecycleListener.ts create mode 100644 apps/staged/src/service-worker.js diff --git a/apps/staged/index.html b/apps/staged/index.html index ac6d68465..bad3ce586 100644 --- a/apps/staged/index.html +++ b/apps/staged/index.html @@ -3,6 +3,10 @@ + + + + void) | undefined; let unregisterShortcuts: (() => void) | null = null; let stopUpdaterLoop: (() => void) | null = null; let storeIncompat = $state(null); @@ -258,6 +262,8 @@ // Global session-status listener — must live at App level so it works // regardless of which view the user is on. See sessionStatusListener.ts. unlistenSessionStatus = await listenForSessionStatus(); + unlistenCacheInvalidation = await listenForCacheInvalidation(); + unlistenPageLifecycle = listenForPageLifecycle(); try { await initPreferences(); @@ -427,6 +433,8 @@ unlistenZoomOut?.(); unlistenZoomReset?.(); unlistenSessionStatus?.(); + unlistenCacheInvalidation?.(); + unlistenPageLifecycle?.(); stopUpdaterLoop?.(); }); diff --git a/apps/staged/src/lib/cache.test.ts b/apps/staged/src/lib/cache.test.ts new file mode 100644 index 000000000..6dcb2e083 --- /dev/null +++ b/apps/staged/src/lib/cache.test.ts @@ -0,0 +1,456 @@ +import 'fake-indexeddb/auto'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// Mock transport — web mode (isTauri = false) with controllable invokeCommand +const mockInvoke = vi.fn(); +vi.mock('./transport', () => ({ + isTauri: false, + invokeCommand: (...args: unknown[]) => mockInvoke(...args), +})); + +import { + cachedInvoke, + cachedCommand, + invalidateCache, + invalidateCacheByArgs, + invalidateCacheByCommand, + markAllStale, + clearAllCache, + _cacheKey, + _CACHE_SCHEMA_VERSION, + _MAX_CACHE_ENTRIES, + _evictIfNeeded, +} from './cache'; + +beforeEach(async () => { + mockInvoke.mockReset(); + await clearAllCache(); +}); + +describe('cacheKey', () => { + it('produces deterministic keys 
regardless of arg order', () => { + expect(_cacheKey('cmd', { b: 2, a: 1 })).toBe(_cacheKey('cmd', { a: 1, b: 2 })); + }); + + it('produces different keys for different commands', () => { + expect(_cacheKey('foo', { a: 1 })).not.toBe(_cacheKey('bar', { a: 1 })); + }); + + it('handles undefined args', () => { + expect(_cacheKey('cmd')).toBe('cmd:'); + }); +}); + +describe('cachedInvoke', () => { + it('yields only network result on cache miss', async () => { + mockInvoke.mockResolvedValue({ items: [1, 2] }); + + const results = []; + for await (const r of cachedInvoke('list', undefined, { ttl: 60_000 })) { + results.push(r); + } + + expect(results).toEqual([{ data: { items: [1, 2] }, source: 'network' }]); + expect(mockInvoke).toHaveBeenCalledWith('list', undefined); + }); + + it('short-circuits with only cache when entry is fresh', async () => { + mockInvoke.mockResolvedValue('first'); + + // Prime the cache + const primeResults = []; + for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) { + primeResults.push(r); + } + expect(primeResults).toHaveLength(1); + + // Second call within TTL should yield only cache (no network call) + mockInvoke.mockResolvedValue('second'); + const results = []; + for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) { + results.push(r); + } + + expect(results).toEqual([{ data: 'first', source: 'cache' }]); + expect(mockInvoke).toHaveBeenCalledTimes(1); // no second network call + }); + + it('yields expired cache then revalidates from network', async () => { + mockInvoke.mockResolvedValue('data'); + + // Prime with ttl=1ms + for await (const _ of cachedInvoke('cmd', undefined, { ttl: 1 })) { + /* consume */ + } + + // Wait for expiry + await new Promise((r) => setTimeout(r, 5)); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke('cmd', undefined, { ttl: 1 })) { + results.push(r); + } + + // Expired entry is still usable — yield stale cache, then 
network + expect(results).toEqual([ + { data: 'data', source: 'cache' }, + { data: 'fresh', source: 'network' }, + ]); + }); + + it('swallows network errors when usable stale cache exists', async () => { + mockInvoke.mockResolvedValue('cached-data'); + + // Prime + for await (const _ of cachedInvoke('cmd', undefined, { ttl: 60_000 })) { + /* consume */ + } + + // Mark stale so revalidation is attempted + await markAllStale(); + + // Network fails on second call + mockInvoke.mockRejectedValue(new Error('offline')); + const results = []; + for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) { + results.push(r); + } + + // Stale cache is still served despite network failure + expect(results).toEqual([{ data: 'cached-data', source: 'cache' }]); + }); + + it('throws network errors when no valid cache exists', async () => { + mockInvoke.mockRejectedValue(new Error('offline')); + + const results = []; + let thrown: Error | undefined; + try { + for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) { + results.push(r); + } + } catch (e) { + thrown = e as Error; + } + + expect(thrown?.message).toBe('offline'); + expect(results).toEqual([]); + }); +}); + +describe('cachedCommand', () => { + it('returns network result with no revalidation on cache miss', async () => { + mockInvoke.mockResolvedValue('value'); + + const result = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + expect(result.data).toBe('value'); + expect(result.revalidating).toBeNull(); + }); + + it('returns only cached data when entry is fresh', async () => { + mockInvoke.mockResolvedValue('v1'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + mockInvoke.mockResolvedValue('v2'); + const result = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + // Fresh entry short-circuits — no network call + expect(result.data).toBe('v1'); + expect(result.revalidating).toBeNull(); + expect(mockInvoke).toHaveBeenCalledTimes(1); + }); + + it('returns stale 
data with a revalidating promise that resolves to fresh', async () => { + mockInvoke.mockResolvedValue('cached'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + await markAllStale(); + + mockInvoke.mockResolvedValue('fresh'); + const result = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + expect(result.data).toBe('cached'); + expect(result.revalidating).not.toBeNull(); + await expect(result.revalidating).resolves.toBe('fresh'); + }); + + it('keeps revalidating promise resolving to cached data when network fails', async () => { + mockInvoke.mockResolvedValue('cached-data'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + await markAllStale(); + + mockInvoke.mockRejectedValue(new Error('offline')); + const result = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + expect(result.data).toBe('cached-data'); + await expect(result.revalidating).resolves.toBe('cached-data'); + }); + + it('throws on miss when the network fails', async () => { + mockInvoke.mockRejectedValue(new Error('offline')); + await expect(cachedCommand('cmd', undefined, { ttl: 60_000 })).rejects.toThrow('offline'); + }); +}); + +describe('invalidateCache', () => { + it('removes a specific entry so next call is a miss', async () => { + mockInvoke.mockResolvedValue('data'); + await cachedCommand('cmd', { id: '1' }, { ttl: 60_000 }); + + await invalidateCache('cmd', { id: '1' }); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) { + results.push(r); + } + + // Only network, no cache hit + expect(results).toEqual([{ data: 'fresh', source: 'network' }]); + }); +}); + +describe('invalidateCacheByCommand', () => { + it('removes all entries for a command', async () => { + mockInvoke.mockResolvedValue('a'); + await cachedCommand('cmd', { id: '1' }, { ttl: 60_000 }); + mockInvoke.mockResolvedValue('b'); + await cachedCommand('cmd', { id: '2' }, { ttl: 60_000 }); + + await 
invalidateCacheByCommand('cmd'); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) { + results.push(r); + } + expect(results).toEqual([{ data: 'fresh', source: 'network' }]); + }); + + it('does not affect other commands', async () => { + mockInvoke.mockResolvedValue('keep'); + await cachedCommand('other', undefined, { ttl: 60_000 }); + + await invalidateCacheByCommand('cmd'); + + mockInvoke.mockResolvedValue('new'); + const results = []; + for await (const r of cachedInvoke('other', undefined, { ttl: 60_000 })) { + results.push(r); + } + // Should still have cache hit (fresh, so no network call) + expect(results).toEqual([{ data: 'keep', source: 'cache' }]); + }); +}); + +describe('invalidateCacheByArgs', () => { + it('removes matching entries for the same command and branchId', async () => { + mockInvoke.mockResolvedValue('branch-a-head'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-a', commitSha: undefined, scope: 'branch' }, + { ttl: 60_000 } + ); + mockInvoke.mockResolvedValue('branch-a-commit'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' }, + { ttl: 60_000 } + ); + + await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' }); + + mockInvoke.mockResolvedValue('fresh'); + const headResults = []; + for await (const r of cachedInvoke( + 'get_diff_files', + { branchId: 'branch-a', commitSha: undefined, scope: 'branch' }, + { ttl: 60_000 } + )) { + headResults.push(r); + } + + const commitResults = []; + for await (const r of cachedInvoke( + 'get_diff_files', + { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' }, + { ttl: 60_000 } + )) { + commitResults.push(r); + } + + expect(headResults).toEqual([{ data: 'fresh', source: 'network' }]); + expect(commitResults).toEqual([{ data: 'fresh', source: 'network' }]); + }); + + it('keeps entries for other branches cached', async 
() => { + mockInvoke.mockResolvedValue('branch-a'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' }, + { ttl: 60_000 } + ); + mockInvoke.mockResolvedValue('branch-b'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-b', commitSha: 'def456', scope: 'commit' }, + { ttl: 60_000 } + ); + + await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' }); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke( + 'get_diff_files', + { branchId: 'branch-b', commitSha: 'def456', scope: 'commit' }, + { ttl: 60_000 } + )) { + results.push(r); + } + + expect(results).toEqual([{ data: 'branch-b', source: 'cache' }]); + }); + + it('keeps entries for other commands cached', async () => { + mockInvoke.mockResolvedValue('diff'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' }, + { ttl: 60_000 } + ); + mockInvoke.mockResolvedValue('messages'); + await cachedCommand('get_session_messages', { branchId: 'branch-a' }, { ttl: 60_000 }); + + await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' }); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke( + 'get_session_messages', + { branchId: 'branch-a' }, + { ttl: 60_000 } + )) { + results.push(r); + } + + expect(results).toEqual([{ data: 'messages', source: 'cache' }]); + }); + + it('matches entries with optional args missing when branchId matches', async () => { + mockInvoke.mockResolvedValue('branch-diff'); + await cachedCommand( + 'get_diff_files', + { branchId: 'branch-a', scope: 'branch' }, + { ttl: 60_000 } + ); + + await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' }); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke( + 'get_diff_files', + { branchId: 'branch-a', scope: 'branch' }, + { ttl: 60_000 } + )) { + 
results.push(r); + } + + expect(results).toEqual([{ data: 'fresh', source: 'network' }]); + }); +}); + +describe('markAllStale', () => { + it('yields stale cache first then revalidates from network', async () => { + mockInvoke.mockResolvedValue('cached'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + await markAllStale(); + + mockInvoke.mockResolvedValue('fresh'); + const results = []; + for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) { + results.push(r); + } + + expect(results).toEqual([ + { data: 'cached', source: 'cache' }, + { data: 'fresh', source: 'network' }, + ]); + }); + + it('cachedCommand returns stale value with a revalidating promise resolving to fresh', async () => { + mockInvoke.mockResolvedValue('cached'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + await markAllStale(); + + mockInvoke.mockResolvedValue('fresh'); + const result = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + expect(result.data).toBe('cached'); + expect(result.revalidating).not.toBeNull(); + await expect(result.revalidating).resolves.toBe('fresh'); + }); + + it('revalidation clears the stale flag', async () => { + mockInvoke.mockResolvedValue('v1'); + await cachedCommand('cmd', undefined, { ttl: 60_000 }); + + await markAllStale(); + + // Revalidate — await the revalidation promise so the cache write completes + mockInvoke.mockResolvedValue('v2'); + const { revalidating } = await cachedCommand('cmd', undefined, { ttl: 60_000 }); + await revalidating; + + // Now the entry should be fresh — short-circuit with only cache + mockInvoke.mockResolvedValue('v3'); + const results = []; + for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) { + results.push(r); + } + + expect(results).toEqual([{ data: 'v2', source: 'cache' }]); + expect(mockInvoke).toHaveBeenCalledTimes(2); // v1 + v2, not v3 + }); +}); + +describe('evictIfNeeded', () => { + it('evicts oldest entries when cache exceeds MAX_CACHE_ENTRIES', 
async () => { + // Fill cache beyond the limit + const total = _MAX_CACHE_ENTRIES + 10; + for (let i = 0; i < total; i++) { + mockInvoke.mockResolvedValue(`value-${i}`); + await cachedCommand('cmd', { id: String(i) }, { ttl: 60_000 }); + } + + // Explicit eviction (also triggered by cacheSet, but let's verify directly) + await _evictIfNeeded(); + + // Verify: the oldest entries should have been evicted. + // The first 10 entries (id 0-9) should be gone; entries 10+ should remain. + mockInvoke.mockResolvedValue('new'); + + // Entry 0 should be a cache miss (evicted) + const missResults = []; + for await (const r of cachedInvoke('cmd', { id: '0' }, { ttl: 60_000 })) { + missResults.push(r); + } + expect(missResults).toEqual([{ data: 'new', source: 'network' }]); + + // Entry at the tail (most recent) should still be a cache hit + const hitResults = []; + for await (const r of cachedInvoke('cmd', { id: String(total - 1) }, { ttl: 60_000 })) { + hitResults.push(r); + } + expect(hitResults[0]).toEqual({ + data: `value-${total - 1}`, + source: 'cache', + }); + }); +}); diff --git a/apps/staged/src/lib/cache.ts b/apps/staged/src/lib/cache.ts new file mode 100644 index 000000000..ec49d5025 --- /dev/null +++ b/apps/staged/src/lib/cache.ts @@ -0,0 +1,289 @@ +import { get, set, del, keys, entries, clear, createStore } from 'idb-keyval'; +import { invokeCommand, isTauri } from './transport'; + +const CACHE_SCHEMA_VERSION = 1; +const MAX_CACHE_ENTRIES = 200; + +/** + * Tracks the last invalidation time per cache key. When a read starts, it + * captures the current epoch for that key. If the key is invalidated while + * the network request is in flight, the epoch advances and the stale write + * is skipped — preventing pre-mutation data from repopulating the cache. + */ +const invalidationEpochs = new Map(); + +function getEpoch(key: string): number { + return invalidationEpochs.get(key) ?? 
0; +} + +function bumpEpoch(key: string): void { + invalidationEpochs.set(key, Date.now()); +} + +let cacheStore: ReturnType | undefined; + +function getStore() { + if (!cacheStore) { + cacheStore = createStore('staged-cache', 'responses'); + } + return cacheStore; +} + +interface CacheEntry { + key: string; + data: T; + fetchedAt: number; + schemaVersion: number; + stale?: boolean; +} + +export interface CacheConfig { + ttl: number; +} + +/** + * Result of a cached command call. + * + * `data` is the best value available immediately (cached if usable, otherwise + * from the network). When `revalidating` is non-null, a network fetch is in + * flight: await it to get the fresh value. Callers can render `data` instantly + * and then re-render once `revalidating` resolves. + */ +export interface SwrResult { + data: T; + revalidating: Promise | null; +} + +function cacheKey(command: string, args?: Record): string { + const argsStr = args ? JSON.stringify(args, Object.keys(args).sort()) : ''; + return `${command}:${argsStr}`; +} + +/** + * Stale-while-revalidate wrapper around invokeCommand. + * + * Yields: + * 1. Cached data (if available and schema matches) — instant + * 2. Fresh network data — if cached data is stale or expired + * + * Fresh entries (within TTL and not marked stale) short-circuit with + * only the cache yield. If no cache exists, only the network result + * is yielded. 
+ */ +export async function* cachedInvoke( + command: string, + args: Record | undefined, + config: CacheConfig +): AsyncGenerator<{ data: T; source: 'cache' | 'network' }> { + if (isTauri) { + const data = await invokeCommand(command, args); + yield { data, source: 'network' }; + return; + } + + const key = cacheKey(command, args); + const store = getStore(); + + const entry = await get>(key, store).catch(() => undefined); + const isUsable = entry != null && entry.schemaVersion === CACHE_SCHEMA_VERSION; + const isFresh = isUsable && !entry.stale && Date.now() - entry.fetchedAt < config.ttl; + + if (isUsable) { + yield { data: entry.data, source: 'cache' }; + } + + if (isFresh) return; + + const epochAtStart = getEpoch(key); + + try { + const data = await invokeCommand(command, args); + // Skip the cache write if the key was invalidated while we were fetching — + // writing would repopulate the cache with pre-mutation data. + if (getEpoch(key) === epochAtStart) { + await cacheSet(key, { + key, + data, + fetchedAt: Date.now(), + schemaVersion: CACHE_SCHEMA_VERSION, + } satisfies CacheEntry); + } + yield { data, source: 'network' }; + } catch (err) { + if (!isUsable) throw err; + console.warn(`[cache] Network error for ${command}, serving stale cache`, err); + } +} + +/** + * Like invokeCommand, but with SWR caching. + * + * Returns `{ data, revalidating }`: + * - `data` is the best value available immediately (cached if usable, else network). + * - `revalidating` is non-null when a background network fetch is in flight; + * callers can await it to get the fresh value. 
+ */ +export async function cachedCommand( + command: string, + args: Record | undefined, + config: CacheConfig +): Promise> { + if (isTauri) { + const data = await invokeCommand(command, args); + return { data, revalidating: null }; + } + + const key = cacheKey(command, args); + const store = getStore(); + + const entry = await get>(key, store).catch(() => undefined); + const isUsable = entry != null && entry.schemaVersion === CACHE_SCHEMA_VERSION; + const isFresh = isUsable && !entry.stale && Date.now() - entry.fetchedAt < config.ttl; + + if (isUsable && isFresh) { + return { data: entry.data, revalidating: null }; + } + + const epochAtStart = getEpoch(key); + const network = invokeCommand(command, args).then(async (data) => { + // Skip the cache write if the key was invalidated while we were fetching — + // writing would repopulate the cache with pre-mutation data. + if (getEpoch(key) === epochAtStart) { + await cacheSet(key, { + key, + data, + fetchedAt: Date.now(), + schemaVersion: CACHE_SCHEMA_VERSION, + } satisfies CacheEntry); + } + return data; + }); + + if (isUsable) { + // Stale/expired but usable — return cached data immediately and let the + // caller await revalidation. Swallow network errors so the stale entry + // remains the resolved value (mirrors cachedInvoke behavior). + const revalidating = network.catch((err) => { + console.warn(`[cache] Network error for ${command}, serving stale cache`, err); + return entry.data; + }); + return { data: entry.data, revalidating }; + } + + // Miss — must await the network before we can return anything usable. + const data = await network; + return { data, revalidating: null }; +} + +/** + * Evict the oldest cache entries (by fetchedAt) until the store is under the + * MAX_CACHE_ENTRIES limit. Called after writes and on quota errors. 
+ */ +async function evictIfNeeded(): Promise { + try { + const store = getStore(); + const allEntries = await entries>(store); + if (allEntries.length <= MAX_CACHE_ENTRIES) return; + + // Sort by fetchedAt ascending (oldest first) and evict the excess + const sorted = allEntries.sort((a, b) => a[1].fetchedAt - b[1].fetchedAt); + const toEvict = sorted.slice(0, sorted.length - MAX_CACHE_ENTRIES); + await Promise.all(toEvict.map(([k]) => del(k, store))); + } catch { + // Best-effort eviction — don't let this block the caller + } +} + +/** + * Write a cache entry, with quota-error recovery via LRU eviction. + */ +async function cacheSet(key: string, entry: CacheEntry): Promise { + const store = getStore(); + try { + await set(key, entry, store); + } catch (err) { + // On quota error, evict old entries and retry once + if (err instanceof DOMException && err.name === 'QuotaExceededError') { + await evictIfNeeded(); + await set(key, entry, store).catch(() => {}); + return; + } + // Swallow other write errors — cache is best-effort + } + // Proactive eviction after successful writes + evictIfNeeded(); +} + +/** Invalidate a specific cache entry. */ +export async function invalidateCache( + command: string, + args?: Record +): Promise { + if (isTauri) return; + const key = cacheKey(command, args); + bumpEpoch(key); + await del(key, getStore()).catch(() => {}); +} + +/** Invalidate all entries for a command (regardless of args). 
*/ +export async function invalidateCacheByCommand(command: string): Promise { + if (isTauri) return; + const store = getStore(); + const allKeys = await keys(store); + const matching = allKeys.filter((k) => k.startsWith(`${command}:`)); + matching.forEach((k) => bumpEpoch(k)); + await Promise.all(matching.map((k) => del(k, store))); +} + +function parseCacheArgs(key: string, command: string): Record | undefined { + const prefix = `${command}:`; + if (!key.startsWith(prefix)) return undefined; + + try { + const parsed = JSON.parse(key.slice(prefix.length)) as unknown; + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) return undefined; + return parsed as Record; + } catch { + return undefined; + } +} + +/** Invalidate entries for a command whose cached args include all partial args. */ +export async function invalidateCacheByArgs( + command: string, + partialArgs: Record +): Promise { + if (isTauri) return; + const store = getStore(); + const allKeys = await keys(store); + const matching = allKeys.filter((key) => { + const args = parseCacheArgs(key, command); + if (!args) return false; + + return Object.entries(partialArgs).every(([argKey, argValue]) => args[argKey] === argValue); + }); + matching.forEach((k) => bumpEpoch(k)); + await Promise.all(matching.map((k) => del(k, store))); +} + +/** Mark all entries as stale so SWR serves them while revalidating. */ +export async function markAllStale(): Promise { + if (isTauri) return; + const store = getStore(); + const allEntries = await entries>(store); + await Promise.all(allEntries.map(([k, entry]) => set(k, { ...entry, stale: true }, store))); +} + +/** Remove all cached entries. 
*/ +export async function clearAllCache(): Promise { + if (isTauri) return; + await clear(getStore()); +} + +// Exported for testing +export { + cacheKey as _cacheKey, + CACHE_SCHEMA_VERSION as _CACHE_SCHEMA_VERSION, + MAX_CACHE_ENTRIES as _MAX_CACHE_ENTRIES, + evictIfNeeded as _evictIfNeeded, +}; diff --git a/apps/staged/src/lib/commands.test.ts b/apps/staged/src/lib/commands.test.ts index 7789947c4..4efccb22f 100644 --- a/apps/staged/src/lib/commands.test.ts +++ b/apps/staged/src/lib/commands.test.ts @@ -7,6 +7,8 @@ describe('browser-native command wrappers', () => { afterEach(() => { vi.unstubAllGlobals(); + vi.doUnmock('./transport'); + vi.doUnmock('./cache'); }); it('opens URLs with browser navigation in web mode', async () => { @@ -61,3 +63,117 @@ describe('browser-native command wrappers', () => { expect(fetch).not.toHaveBeenCalled(); }); }); + +describe('cached mutation command wrappers', () => { + function deferred() { + let resolve!: () => void; + const promise = new Promise((res) => { + resolve = res; + }); + return { promise, resolve }; + } + + let invokeCommand: ReturnType; + let cachedCommand: ReturnType; + let invalidateCache: ReturnType; + let invalidateCacheByCommand: ReturnType; + + beforeEach(() => { + vi.resetModules(); + invokeCommand = vi.fn(); + cachedCommand = vi.fn(); + invalidateCache = vi.fn(); + invalidateCacheByCommand = vi.fn(); + + vi.doMock('./transport', () => ({ + isTauri: false, + invokeCommand, + })); + vi.doMock('./cache', () => ({ + cachedCommand, + cachedInvoke: vi.fn(), + invalidateCache, + invalidateCacheByCommand, + })); + }); + + afterEach(() => { + vi.doUnmock('./transport'); + vi.doUnmock('./cache'); + }); + + it('waits for repo list invalidation before resolving addProjectRepo', async () => { + const repo = { id: 'repo-1' }; + const invalidated = deferred(); + invokeCommand.mockResolvedValue(repo); + invalidateCache.mockReturnValue(invalidated.promise); + + const { addProjectRepo } = await import('./commands'); + + let 
settled = false; + const result = addProjectRepo('project-1', 'block/builderbot').then((value) => { + settled = true; + return value; + }); + + await Promise.resolve(); + await Promise.resolve(); + + expect(invalidateCache).toHaveBeenCalledWith('list_project_repos', { projectId: 'project-1' }); + expect(settled).toBe(false); + + invalidated.resolve(); + + await expect(result).resolves.toBe(repo); + }); + + it('waits for all project cache invalidations before resolving deleteProject', async () => { + const projectsInvalidated = deferred(); + const branchesInvalidated = deferred(); + const reposInvalidated = deferred(); + invokeCommand.mockResolvedValue(undefined); + invalidateCacheByCommand + .mockReturnValueOnce(projectsInvalidated.promise) + .mockReturnValueOnce(branchesInvalidated.promise) + .mockReturnValueOnce(reposInvalidated.promise); + + const { deleteProject } = await import('./commands'); + + let settled = false; + const result = deleteProject('project-1').then(() => { + settled = true; + }); + + await Promise.resolve(); + await Promise.resolve(); + + expect(invalidateCacheByCommand.mock.calls).toEqual([ + ['list_projects'], + ['list_branches_for_project'], + ['list_project_repos'], + ]); + expect(settled).toBe(false); + + projectsInvalidated.resolve(); + branchesInvalidated.resolve(); + await Promise.resolve(); + expect(settled).toBe(false); + + reposInvalidated.resolve(); + + await expect(result).resolves.toBeUndefined(); + }); + + it('bypasses the SWR cache when fetching fresh session messages', async () => { + const messages = [{ id: 1, sessionId: 'session-1', role: 'assistant', content: 'done' }]; + invokeCommand.mockResolvedValue(messages); + + const { getFreshSessionMessages } = await import('./commands'); + + await expect(getFreshSessionMessages('session-1')).resolves.toBe(messages); + expect(invokeCommand).toHaveBeenCalledWith('get_session_messages', { + sessionId: 'session-1', + }); + expect(cachedCommand).not.toHaveBeenCalled(); + }); +}); diff 
--git a/apps/staged/src/lib/commands.ts b/apps/staged/src/lib/commands.ts index e544eeb70..9f857d801 100644 --- a/apps/staged/src/lib/commands.ts +++ b/apps/staged/src/lib/commands.ts @@ -5,6 +5,13 @@ */ import { invokeCommand, isTauri } from './transport'; +import { + cachedCommand, + cachedInvoke, + invalidateCacheByCommand, + invalidateCache, + type SwrResult, +} from './cache'; import type { Project, ProjectRepo, @@ -68,11 +75,11 @@ export function confirmResetStore(): Promise { // Projects // ============================================================================= -export function listProjects(): Promise { - return invokeCommand('list_projects'); +export function listProjects(): Promise> { + return cachedCommand('list_projects', undefined, { ttl: 5 * 60_000 }); } -export function createProject( +export async function createProject( name: string, location: 'local' | 'remote', githubRepo?: string, @@ -82,7 +89,7 @@ export function createProject( defaultBranch?: string, headRepo?: string ): Promise { - return invokeCommand('create_project', { + const project = await invokeCommand('create_project', { name, location, githubRepo: githubRepo ?? null, @@ -92,21 +99,28 @@ export function createProject( defaultBranch: defaultBranch ?? null, headRepo: headRepo ?? 
null, }); + await invalidateCacheByCommand('list_projects'); + return project; } -export function deleteProject(id: string): Promise { - return invokeCommand('delete_project', { id }); +export async function deleteProject(id: string): Promise { + await invokeCommand('delete_project', { id }); + await Promise.all([ + invalidateCacheByCommand('list_projects'), + invalidateCacheByCommand('list_branches_for_project'), + invalidateCacheByCommand('list_project_repos'), + ]); } -export function listProjectRepos(projectId: string): Promise { - return invokeCommand('list_project_repos', { projectId }); +export function listProjectRepos(projectId: string): Promise> { + return cachedCommand('list_project_repos', { projectId }, { ttl: 10 * 60_000 }); } export function listRecentRepos(limit?: number): Promise { return invokeCommand('list_recent_repos', { limit: limit ?? 10 }); } -export function addProjectRepo( +export async function addProjectRepo( projectId: string, githubRepo: string, branchName?: string, @@ -116,7 +130,7 @@ export function addProjectRepo( defaultBranch?: string, headRepo?: string ): Promise { - return invokeCommand('add_project_repo', { + const repo = await invokeCommand('add_project_repo', { projectId, githubRepo, branchName: branchName ?? null, @@ -126,6 +140,8 @@ export function addProjectRepo( defaultBranch: defaultBranch ?? null, headRepo: headRepo ?? 
null, }); + await invalidateCache('list_project_repos', { projectId }); + return repo; } export function updateProjectRepoBranchName( @@ -136,12 +152,17 @@ export function updateProjectRepoBranchName( return invokeCommand('update_project_repo_branch_name', { projectId, projectRepoId, branchName }); } -export function removeProjectRepo(projectId: string, projectRepoId: string): Promise { - return invokeCommand('remove_project_repo', { projectId, projectRepoId }); +export async function removeProjectRepo(projectId: string, projectRepoId: string): Promise { + await invokeCommand('remove_project_repo', { projectId, projectRepoId }); + await invalidateCache('list_project_repos', { projectId }); } -export function setPrimaryProjectRepo(projectId: string, projectRepoId: string): Promise { - return invokeCommand('set_primary_project_repo', { projectId, projectRepoId }); +export async function setPrimaryProjectRepo( + projectId: string, + projectRepoId: string +): Promise { + await invokeCommand('set_primary_project_repo', { projectId, projectRepoId }); + await invalidateCache('list_project_repos', { projectId }); } export function clearProjectRepoReason(projectRepoId: string): Promise { @@ -239,8 +260,8 @@ export function startProjectSession( // Branches // ============================================================================= -export function listBranchesForProject(projectId: string): Promise { - return invokeCommand('list_branches_for_project', { projectId }); +export function listBranchesForProject(projectId: string): Promise> { + return cachedCommand('list_branches_for_project', { projectId }, { ttl: 2 * 60_000 }); } /** Get a single branch by ID. */ @@ -251,13 +272,20 @@ export function getBranch(branchId: string): Promise { /** Create a local branch record (DB only — no git worktree yet). * Returns immediately with worktreePath = null. * Call `setupWorktree` separately to create the git worktree. 
*/ -export function createBranch( +export async function createBranch( projectId: string, branchName: string, baseBranch?: string, projectRepoId?: string ): Promise { - return invokeCommand('create_branch', { projectId, branchName, baseBranch, projectRepoId }); + const branch = await invokeCommand('create_branch', { + projectId, + branchName, + baseBranch, + projectRepoId, + }); + await invalidateCacheByCommand('list_branches_for_project'); + return branch; } /** Create the git worktree for a local branch and record its workdir. @@ -318,8 +346,12 @@ export function resumeWorkspace(workspaceName: string): Promise { return invokeCommand('resume_workspace', { workspaceName }); } -export function deleteBranch(branchId: string): Promise { - return invokeCommand('delete_branch', { branchId }); +export async function deleteBranch(branchId: string): Promise { + await invokeCommand('delete_branch', { branchId }); + await Promise.all([ + invalidateCacheByCommand('list_branches_for_project'), + invalidateCache('get_branch_timeline', { branchId }), + ]); } export function renameBranch(branchId: string, branchName: string): Promise { @@ -353,12 +385,14 @@ export function pollAllWorkspaceStatuses( // ============================================================================= const TIMELINE_FRESH_MS = 10_000; +const TIMELINE_CACHE_TTL = 30_000; const timelineCache = new Map(); const inFlightTimelines = new Map>(); export function invalidateBranchTimeline(branchId: string): void { timelineCache.delete(branchId); inFlightTimelines.delete(branchId); + invalidateCache('get_branch_timeline', { branchId }); window.dispatchEvent( new CustomEvent('timeline-invalidated', { detail: { branchIds: [branchId] } }) ); @@ -384,18 +418,30 @@ export function getBranchTimeline( } } - const request = invokeCommand('get_branch_timeline', { branchId }) - .then((timeline) => { - if (inFlightTimelines.get(branchId) === request) { + // Use cachedInvoke so IndexedDB serves data on cold start while the 
network + // fetch runs in parallel (SWR). The first yield may be cached; the last is + // always the freshest available value. + let request: Promise | undefined; + const timelineRequest = (async () => { + let timeline: BranchTimeline | undefined; + for await (const { data } of cachedInvoke( + 'get_branch_timeline', + { branchId }, + { ttl: TIMELINE_CACHE_TTL } + )) { + timeline = data; + if (request && inFlightTimelines.get(branchId) === request) { timelineCache.set(branchId, { timeline, fetchedAt: Date.now() }); } - return timeline; - }) - .finally(() => { - if (inFlightTimelines.get(branchId) === request) { - inFlightTimelines.delete(branchId); - } - }); + } + return timeline!; + })(); + + request = timelineRequest.finally(() => { + if (request && inFlightTimelines.get(branchId) === request) { + inFlightTimelines.delete(branchId); + } + }); inFlightTimelines.set(branchId, request); return request; @@ -421,6 +467,7 @@ export function invalidateProjectBranchTimelines(branchIds: string[]): void { for (const id of branchIds) { timelineCache.delete(id); inFlightTimelines.delete(id); + invalidateCache('get_branch_timeline', { branchId: id }); } window.dispatchEvent(new CustomEvent('timeline-invalidated', { detail: { branchIds } })); } @@ -570,8 +617,8 @@ export interface AcpProviderInfo { } /** Scan the system for installed ACP-compatible agents. 
*/ -export function discoverAcpProviders(): Promise { - return invokeCommand('discover_acp_providers'); +export function discoverAcpProviders(): Promise> { + return cachedCommand('discover_acp_providers', undefined, { ttl: 30 * 60_000 }); } // ============================================================================= @@ -582,7 +629,12 @@ export function getSession(sessionId: string): Promise { return invokeCommand('get_session', { sessionId }); } -export function getSessionMessages(sessionId: string): Promise { +export function getSessionMessages(sessionId: string): Promise> { + return cachedCommand('get_session_messages', { sessionId }, { ttl: 5 * 60_000 }); +} + +/** Fetch session messages without SWR cache, for terminal status handlers. */ +export function getFreshSessionMessages(sessionId: string): Promise { return invokeCommand('get_session_messages', { sessionId }); } @@ -741,8 +793,8 @@ export function getDiffFiles( branchId: string, commitSha?: string, scope: DiffScope = 'branch' -): Promise { - return invokeCommand('get_diff_files', { branchId, commitSha, scope }); +): Promise> { + return cachedCommand('get_diff_files', { branchId, commitSha, scope }, { ttl: 2 * 60_000 }); } /** Get the full diff content for a single file. */ @@ -751,8 +803,8 @@ export function getFileDiff( commitSha: string, scope: DiffScope, path: string -): Promise { - return invokeCommand('get_file_diff', { branchId, commitSha, scope, path }); +): Promise> { + return cachedCommand('get_file_diff', { branchId, commitSha, scope, path }, { ttl: 2 * 60_000 }); } /** Get file content at a specific ref (for reference files). 
*/ diff --git a/apps/staged/src/lib/features/agents/agent.svelte.ts b/apps/staged/src/lib/features/agents/agent.svelte.ts index 07f8a8e04..27ce96a37 100644 --- a/apps/staged/src/lib/features/agents/agent.svelte.ts +++ b/apps/staged/src/lib/features/agents/agent.svelte.ts @@ -82,9 +82,16 @@ export const agentState = $state({ */ export async function refreshProviders(): Promise { try { - const providers = await discoverAcpProviders(); + const { data: providers, revalidating } = await discoverAcpProviders(); agentState.providers = providers; agentState.loaded = true; + if (revalidating) { + revalidating + .then((fresh) => { + agentState.providers = fresh; + }) + .catch((e) => console.error('Failed to revalidate ACP providers:', e)); + } return providers; } catch (e) { console.error('Failed to discover ACP providers:', e); diff --git a/apps/staged/src/lib/features/branches/BranchCard.svelte b/apps/staged/src/lib/features/branches/BranchCard.svelte index 7de3ebff9..8833cfbfb 100644 --- a/apps/staged/src/lib/features/branches/BranchCard.svelte +++ b/apps/staged/src/lib/features/branches/BranchCard.svelte @@ -730,6 +730,17 @@ return () => window.removeEventListener('project-notes-invalidated', handler); }); + // Re-fetch timeline when page resumes from a freeze (cache-stale event) + $effect(() => { + const handler = () => { + if (branchTimelineReadyKey(branch)) { + void loadTimeline(); + } + }; + window.addEventListener('cache-stale', handler); + return () => window.removeEventListener('cache-stale', handler); + }); + async function loadTimeline({ timelineKey = branchTimelineReadyKey(branch), force = false, diff --git a/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte b/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte index fc4c3c72f..5756225aa 100644 --- a/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte +++ b/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte @@ -434,7 +434,7 @@ try { if (status === 'completed' 
&& sid) { - const messages = await commands.getSessionMessages(sid); + const messages = await commands.getFreshSessionMessages(sid); const foundUrl = extractPrUrl(messages); if (foundUrl) { @@ -507,7 +507,7 @@ } try { - const messages = await commands.getSessionMessages(sid); + const messages = await commands.getFreshSessionMessages(sid); const pipelineOutcome = classifyPipelinePushCompletion(pipeline, messages); if (pipelineOutcome) return pipelineOutcome; return classifyCompletedPushSession(pipeline, messages); diff --git a/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts b/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts index 16df8640f..11b503419 100644 --- a/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts +++ b/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts @@ -43,19 +43,26 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit state.error = null; try { - const response = await commands.getDiffFiles( + const { data: response, revalidating } = await commands.getDiffFiles( state.branchId, state.commitSha ?? undefined, state.scope ); if (generation !== contextGeneration) return; - state.commitSha = response.commitSha; - state.files = response.files; - + applyDiffFilesResponse(response); if (state.files.length > 0) { await selectFile(sharedFileSummaryPath(state.files[0])); } + if (generation === contextGeneration) { + state.loading = false; + } + + if (revalidating) { + const fresh = await revalidating; + if (generation !== contextGeneration) return; + applyDiffFilesResponse(fresh); + } } catch (e) { if (generation !== contextGeneration) return; state.error = e instanceof Error ? 
e.message : String(e); @@ -67,6 +74,11 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit } } + function applyDiffFilesResponse(response: { commitSha: string; files: FileDiffSummary[] }) { + state.commitSha = response.commitSha; + state.files = response.files; + } + async function selectFile(path: string | null): Promise { const thisGeneration = ++selectionGeneration; state.selectedFile = path; @@ -84,12 +96,29 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit if (cached) return cached; state.loadingFile = path; + const commitSha = state.commitSha; try { - const diff = await commands.getFileDiff(state.branchId, state.commitSha, state.scope, path); + const { data: diff, revalidating } = await commands.getFileDiff( + state.branchId, + commitSha, + state.scope, + path + ); const newCache = new Map(state.diffCache); newCache.set(path, diff); state.diffCache = newCache; + + if (revalidating) { + revalidating + .then((fresh) => { + if (state.commitSha !== commitSha) return; + const next = new Map(state.diffCache); + next.set(path, fresh); + state.diffCache = next; + }) + .catch(() => {}); + } return diff; } catch (e) { console.error(`Failed to load diff for ${path}:`, e); diff --git a/apps/staged/src/lib/features/layout/navigation.svelte.ts b/apps/staged/src/lib/features/layout/navigation.svelte.ts index d04b41981..18971838b 100644 --- a/apps/staged/src/lib/features/layout/navigation.svelte.ts +++ b/apps/staged/src/lib/features/layout/navigation.svelte.ts @@ -51,7 +51,7 @@ export async function initNavigation(): Promise { // Validate the project still exists before navigating to it try { - const projects = await commands.listProjects(); + const { data: projects } = await commands.listProjects(); projectsList.current = projects; const existingIds = new Set(projects.map((p) => p.id)); if (existingIds.has(lastProjectId)) { diff --git a/apps/staged/src/lib/features/projects/ProjectHome.svelte 
b/apps/staged/src/lib/features/projects/ProjectHome.svelte index 0dcb10f05..0cf798529 100644 --- a/apps/staged/src/lib/features/projects/ProjectHome.svelte +++ b/apps/staged/src/lib/features/projects/ProjectHome.svelte @@ -102,7 +102,9 @@ void projectRunActionsStore.startListening(); const onNewProject = () => handleNewProject(); + const onCacheStale = () => loadData(); window.addEventListener('staged:new-project', onNewProject); + window.addEventListener('cache-stale', onCacheStale); let unlistenDetection: (() => void) | undefined; listenToRepoActionsDetection((event) => { @@ -136,18 +138,18 @@ commands.listBranchesForProject(projectId), commands.listProjectRepos(projectId), ]); - setProjects(projectsList); - projects = projectsList; + setProjects(projectsList.data); + projects = projectsList.data; const mergedBranches = mergeBranchesPreservingWorktree( branchesByProject.get(projectId) || [], - branches + branches.data ); branchesByProject = new Map(branchesByProject).set(projectId, mergedBranches); commands.invalidateProjectBranchTimelines(mergedBranches.map((b) => b.id)); workspaceLifecycle.enqueueInitialSetup(projectId, mergedBranches); - replaceProjectRepos(projectId, repos); + replaceProjectRepos(projectId, repos.data); void repoBadgeStore.ensureForRepos( - repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) + repos.data.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) ); } catch (e) { console.error('[ProjectHome] Failed to refresh project after setup progress:', e); @@ -185,6 +187,7 @@ return () => { window.removeEventListener('staged:new-project', onNewProject); + window.removeEventListener('cache-stale', onCacheStale); unlistenDetection?.(); unlistenProjectRepoAdded?.(); unlistenPrStatus?.(); @@ -234,89 +237,135 @@ error = null; await repoBadgeStore.loadAll(); try { - const projectList = await commands.listProjects(); + const { data: initialProjectList, revalidating: projectsRevalidating } = + await commands.listProjects(); 
if (generation !== loadGeneration) return; - projects = projectList; - setProjects(projectList); + await applyProjectList(initialProjectList, generation); loading = false; - // Seed maps so project sections can render immediately. - const branchMap = new Map(); - for (const project of projectList) { - branchMap.set(project.id, branchesByProject.get(project.id) || []); + if (projectsRevalidating) { + try { + const fresh = await projectsRevalidating; + if (generation !== loadGeneration) return; + await applyProjectList(fresh, generation); + } catch (e) { + console.error('[ProjectHome] Failed to revalidate project list:', e); + } } - branchesByProject = branchMap; - - // Drop cached repos for projects that no longer exist. - const projectIds = new Set(projectList.map((p) => p.id)); - const prunedRepos = new Map(); - for (const [id, repo] of reposById) { - if (projectIds.has(repo.projectId)) prunedRepos.set(id, repo); + } catch (e) { + if (generation !== loadGeneration) return; + error = e instanceof Error ? e.message : String(e); + } finally { + if (generation === loadGeneration) { + loading = false; } - reposById = prunedRepos; - - await Promise.all( - projectList.map(async (project) => { - try { - const [branches, repos] = await Promise.all([ - commands.listBranchesForProject(project.id), - commands.listProjectRepos(project.id), - ]); - if (generation !== loadGeneration) return; - branchesByProject = new Map(branchesByProject).set(project.id, branches); - workspaceLifecycle.enqueueInitialSetup(project.id, branches); - replaceProjectRepos(project.id, repos); - - // On startup, drain queued sessions for branches that are already ready. 
- for (const branch of branches) { - const isLocalReady = branch.branchType === 'local' && branch.worktreePath; - const isRemoteReady = - branch.branchType === 'remote' && branch.workspaceStatus === 'running'; - if (isLocalReady || isRemoteReady) { - commands.drainQueuedSessions(branch.id).catch((e) => { - console.error('[ProjectHome] Failed to drain queued sessions on startup:', e); - }); - } - } - } catch (e) { - console.error(`[ProjectHome] Failed to hydrate project '${project.id}':`, e); + } + } + + /** + * Apply a list of projects fetched from the backend: seed branch/repo maps, + * hydrate per-project branches and repos (with SWR revalidation), and + * refresh action-detection state. Called once with the cached value and + * again if the network revalidation yields fresh data. + */ + async function applyProjectList(projectList: Project[], generation: number) { + projects = projectList; + setProjects(projectList); + + // Seed maps so project sections can render immediately. + const branchMap = new Map(); + for (const project of projectList) { + branchMap.set(project.id, branchesByProject.get(project.id) || []); + } + branchesByProject = branchMap; + + // Drop cached repos for projects that no longer exist. 
+ const projectIds = new Set(projectList.map((p) => p.id)); + const prunedRepos = new Map(); + for (const [id, repo] of reposById) { + if (projectIds.has(repo.projectId)) prunedRepos.set(id, repo); + } + reposById = prunedRepos; + + await Promise.all( + projectList.map(async (project) => { + try { + const [branchesResult, reposResult] = await Promise.all([ + commands.listBranchesForProject(project.id), + commands.listProjectRepos(project.id), + ]); + if (generation !== loadGeneration) return; + applyProjectBranches(project.id, branchesResult.data, generation); + replaceProjectRepos(project.id, reposResult.data); + + if (branchesResult.revalidating) { + branchesResult.revalidating + .then((fresh) => applyProjectBranches(project.id, fresh, generation)) + .catch((e) => { + console.error( + `[ProjectHome] Failed to revalidate branches for '${project.id}':`, + e + ); + }); } - }) - ); + if (reposResult.revalidating) { + reposResult.revalidating + .then((fresh) => { + if (generation !== loadGeneration) return; + replaceProjectRepos(project.id, fresh); + }) + .catch((e) => { + console.error(`[ProjectHome] Failed to revalidate repos for '${project.id}':`, e); + }); + } + } catch (e) { + console.error(`[ProjectHome] Failed to hydrate project '${project.id}':`, e); + } + }) + ); - projectRunActionsStore.hydrateFromProjectBranches(branchesByProject).catch(console.error); + projectRunActionsStore.hydrateFromProjectBranches(branchesByProject).catch(console.error); - // Ensure badges exist for all loaded repos - const allRepos = [...reposById.values()].map((r) => ({ - githubRepo: r.githubRepo, - subpath: r.subpath, - })); - void repoBadgeStore.ensureForRepos(allRepos); + // Ensure badges exist for all loaded repos + const allRepos = [...reposById.values()].map((r) => ({ + githubRepo: r.githubRepo, + subpath: r.subpath, + })); + void repoBadgeStore.ensureForRepos(allRepos); - try { - const contexts = await commands.listActionContexts(); - if (generation !== loadGeneration) 
return; - detectingProjectIds = new Set( - projectList - .filter((project) => - contexts.some( - (context) => - context.detectingActions && - context.githubRepo === project.githubRepo && - context.subpath === project.subpath - ) + try { + const contexts = await commands.listActionContexts(); + if (generation !== loadGeneration) return; + detectingProjectIds = new Set( + projectList + .filter((project) => + contexts.some( + (context) => + context.detectingActions && + context.githubRepo === project.githubRepo && + context.subpath === project.subpath ) - .map((project) => project.id) - ); - } catch (e) { - console.error('[ProjectHome] Failed to load action contexts:', e); - } + ) + .map((project) => project.id) + ); } catch (e) { - if (generation !== loadGeneration) return; - error = e instanceof Error ? e.message : String(e); - } finally { - if (generation === loadGeneration) { - loading = false; + console.error('[ProjectHome] Failed to load action contexts:', e); + } + } + + function applyProjectBranches(projectId: string, branches: Branch[], generation: number) { + if (generation !== loadGeneration) return; + branchesByProject = new Map(branchesByProject).set(projectId, branches); + workspaceLifecycle.enqueueInitialSetup(projectId, branches); + + // On startup, drain queued sessions for branches that are already ready. 
+ for (const branch of branches) { + const isLocalReady = branch.branchType === 'local' && branch.worktreePath; + const isRemoteReady = branch.branchType === 'remote' && branch.workspaceStatus === 'running'; + if (isLocalReady || isRemoteReady) { + commands.drainQueuedSessions(branch.id).catch((e) => { + console.error('[ProjectHome] Failed to drain queued sessions on startup:', e); + }); } } } @@ -420,9 +469,9 @@ commands.listBranchesForProject(project.id), commands.listProjectRepos(project.id), ]); - branchesByProject = new Map(branchesByProject).set(project.id, branches); - workspaceLifecycle.enqueueInitialSetup(project.id, branches); - replaceProjectRepos(project.id, repos); + branchesByProject = new Map(branchesByProject).set(project.id, branches.data); + workspaceLifecycle.enqueueInitialSetup(project.id, branches.data); + replaceProjectRepos(project.id, repos.data); } catch (e) { console.error('[ProjectHome] Failed to hydrate newly created project:', e); } @@ -534,18 +583,18 @@ commands.listBranchesForProject(projectId), commands.listProjectRepos(projectId), ]); - setProjects(projectsList); - projects = projectsList; + setProjects(projectsList.data); + projects = projectsList.data; const mergedBranches = mergeBranchesPreservingWorktree( branchesByProject.get(projectId) || [], - branches + branches.data ); branchesByProject = new Map(branchesByProject).set(projectId, mergedBranches); commands.invalidateProjectBranchTimelines(mergedBranches.map((b) => b.id)); workspaceLifecycle.enqueueInitialSetup(projectId, mergedBranches); - replaceProjectRepos(projectId, repos); + replaceProjectRepos(projectId, repos.data); void repoBadgeStore.ensureForRepos( - repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) + repos.data.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) ); } catch (e) { console.error('Failed to add repo:', e); @@ -612,10 +661,10 @@ commands.listBranchesForProject(branch.projectId), commands.listProjectRepos(branch.projectId), 
]); - setProjects(projectsList); - projects = projectsList; - branchesByProject = new Map(branchesByProject).set(branch.projectId, branches); - replaceProjectRepos(branch.projectId, repos); + setProjects(projectsList.data); + projects = projectsList.data; + branchesByProject = new Map(branchesByProject).set(branch.projectId, branches.data); + replaceProjectRepos(branch.projectId, repos.data); } else { await commands.deleteBranch(branch.id); // Fallback for legacy branches without repo linkage diff --git a/apps/staged/src/lib/features/projects/ProjectsList.svelte b/apps/staged/src/lib/features/projects/ProjectsList.svelte index 983e06996..6e2ba56a7 100644 --- a/apps/staged/src/lib/features/projects/ProjectsList.svelte +++ b/apps/staged/src/lib/features/projects/ProjectsList.svelte @@ -289,9 +289,11 @@ deletingProjectNames = next; loadProjects(); }; + const onCacheStale = () => loadProjects(); window.addEventListener('staged:new-project', onNewProject); window.addEventListener('staged:project-delete-start', onProjectDeleteStart); window.addEventListener('staged:project-delete-end', onProjectDeleteEnd); + window.addEventListener('cache-stale', onCacheStale); // Listen for PR status changes to update branch state let unlistenPrStatus: UnlistenFn | undefined; @@ -325,6 +327,7 @@ window.removeEventListener('staged:new-project', onNewProject); window.removeEventListener('staged:project-delete-start', onProjectDeleteStart); window.removeEventListener('staged:project-delete-end', onProjectDeleteEnd); + window.removeEventListener('cache-stale', onCacheStale); unlistenPrStatus?.(); }; }); @@ -334,26 +337,15 @@ error = null; try { await repoBadgeStore.loadAll(); - const loadedProjects = await commands.listProjects(); - projects = loadedProjects; - setProjects(loadedProjects); - void hydrateRepos(loadedProjects); - // Load branches for each project to calculate PR status - const branchesMap = new Map(); - await Promise.all( - loadedProjects.map(async (project) => { - try { - 
const branches = await commands.listBranchesForProject(project.id); - branchesMap.set(project.id, branches); - } catch (e) { - console.error(`Failed to load branches for project ${project.id}:`, e); - branchesMap.set(project.id, []); - } - }) - ); - projectBranches = branchesMap; + const { data: initialProjects, revalidating: projectsRevalidating } = + await commands.listProjects(); + await applyProjects(initialProjects); + loading = false; - projectRunActionsStore.hydrateFromProjectBranches(branchesMap).catch(console.error); + if (projectsRevalidating) { + const fresh = await projectsRevalidating; + await applyProjects(fresh); + } } catch (e) { error = e instanceof Error ? e.message : String(e); } finally { @@ -361,14 +353,60 @@ } } + async function applyProjects(loadedProjects: Project[]) { + projects = loadedProjects; + setProjects(loadedProjects); + void hydrateRepos(loadedProjects); + + const branchesMap = new Map(); + const branchRevalidations: Array<{ projectId: string; promise: Promise }> = []; + await Promise.all( + loadedProjects.map(async (project) => { + try { + const { data: branches, revalidating } = await commands.listBranchesForProject( + project.id + ); + branchesMap.set(project.id, branches); + if (revalidating) { + branchRevalidations.push({ projectId: project.id, promise: revalidating }); + } + } catch (e) { + console.error(`Failed to load branches for project ${project.id}:`, e); + branchesMap.set(project.id, []); + } + }) + ); + projectBranches = branchesMap; + projectRunActionsStore.hydrateFromProjectBranches(branchesMap).catch(console.error); + + if (branchRevalidations.length > 0) { + void Promise.all( + branchRevalidations.map(async ({ projectId, promise }) => { + try { + const fresh = await promise; + projectBranches = new Map(projectBranches).set(projectId, fresh); + } catch (e) { + console.error(`Failed to revalidate branches for project ${projectId}:`, e); + } + }) + ).then(() => + 
projectRunActionsStore.hydrateFromProjectBranches(projectBranches).catch(console.error) + ); + } + } + async function hydrateRepos(projectList: Project[]) { const generation = ++repoLoadGeneration; reposHydrating = true; try { + const revalidations: Array<{ projectId: string; promise: Promise }> = []; const entries = await Promise.all( projectList.map(async (project) => { try { - const repos = await commands.listProjectRepos(project.id); + const { data: repos, revalidating } = await commands.listProjectRepos(project.id); + if (revalidating) { + revalidations.push({ projectId: project.id, promise: revalidating }); + } return [project.id, repos] as const; } catch (e) { console.error(`[ProjectsList] Failed to load repos for project '${project.id}':`, e); @@ -384,6 +422,20 @@ repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) ); void repoBadgeStore.ensureForRepos(allRepos); + + for (const { projectId, promise } of revalidations) { + void promise + .then((fresh) => { + if (generation !== repoLoadGeneration) return; + reposByProject = new Map(reposByProject).set(projectId, fresh); + void repoBadgeStore.ensureForRepos( + fresh.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath })) + ); + }) + .catch((e) => { + console.error(`[ProjectsList] Failed to revalidate repos for '${projectId}':`, e); + }); + } } finally { if (generation === repoLoadGeneration) { reposHydrating = false; diff --git a/apps/staged/src/lib/features/sessions/SessionModal.svelte b/apps/staged/src/lib/features/sessions/SessionModal.svelte index e91f08323..2c56a5412 100644 --- a/apps/staged/src/lib/features/sessions/SessionModal.svelte +++ b/apps/staged/src/lib/features/sessions/SessionModal.svelte @@ -400,14 +400,26 @@ loading = true; error = null; try { - const [s, msgs] = await Promise.all([getSession(sessionId), getSessionMessages(sessionId)]); + const [s, msgsResult] = await Promise.all([ + getSession(sessionId), + getSessionMessages(sessionId), + ]); if (closed) return; if 
(!s) { error = 'Session not found'; return; } session = s; - messages = msgs; + messages = msgsResult.data; + if (msgsResult.revalidating) { + msgsResult.revalidating + .then((fresh) => { + if (closed) return; + messages = fresh; + scrollToBottomIfNear(true); + }) + .catch(() => {}); + } } catch (e) { error = e instanceof Error ? e.message : String(e); } finally { @@ -433,7 +445,7 @@ // Incremental message fetch if (messages.length === 0) { - const msgs = await getSessionMessages(sessionId); + const { data: msgs } = await getSessionMessages(sessionId); if (closed) return; if (msgs.length > 0) { messages = msgs; diff --git a/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte b/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte index ac7a3f232..e02c4c246 100644 --- a/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte +++ b/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte @@ -199,10 +199,10 @@ const contextIdByRepo = new Map( actionContexts.map((context) => [repoKey(context.githubRepo, context.subpath), context.id]) ); - const projects = await commands.listProjects(); + const { data: projects } = await commands.listProjects(); const reposByProject = await Promise.all( projects.map(async (project) => { - const repos = await commands.listProjectRepos(project.id); + const { data: repos } = await commands.listProjectRepos(project.id); return { project, repos }; }) ); diff --git a/apps/staged/src/lib/features/timeline/liveSessionHints.ts b/apps/staged/src/lib/features/timeline/liveSessionHints.ts index 1f7b661be..afa07218e 100644 --- a/apps/staged/src/lib/features/timeline/liveSessionHints.ts +++ b/apps/staged/src/lib/features/timeline/liveSessionHints.ts @@ -231,7 +231,7 @@ export function createLiveSessionHints( const updatedMessages = tracker.lastMessageId === null - ? await commands.getSessionMessages(sessionId) + ? 
(await commands.getSessionMessages(sessionId)).data : await commands.getSessionMessagesSince(sessionId, tracker.lastMessageId); if (destroyed || !hintTrackers.has(sessionId)) return; diff --git a/apps/staged/src/lib/listeners/cacheInvalidationListener.ts b/apps/staged/src/lib/listeners/cacheInvalidationListener.ts new file mode 100644 index 000000000..a7be3e13c --- /dev/null +++ b/apps/staged/src/lib/listeners/cacheInvalidationListener.ts @@ -0,0 +1,54 @@ +/** + * Event-driven cache invalidation listener. + * + * Listens for backend events (pr-status-changed, branch-git-state-changed) + * and invalidates the corresponding IndexedDB cache entries so that stale + * data is never served after the backend pushes an update. + */ + +import { listenToEvent, type UnlistenFn } from '../transport'; +import { invalidateCache, invalidateCacheByArgs, invalidateCacheByCommand } from '../cache'; +import { invalidateBranchTimeline } from '../commands'; +import type { PrStatusChangedEvent, SessionStatusPayload } from '../types'; + +interface BranchGitStateChangedEvent { + branchId: string; +} + +export async function listenForCacheInvalidation(): Promise { + const unlisteners: UnlistenFn[] = []; + + // PR status changed → invalidate branch listings (they embed PR state) + unlisteners.push( + await listenToEvent('pr-status-changed', () => { + invalidateCacheByCommand('list_branches_for_project'); + }) + ); + + // Branch git state changed → invalidate timeline and diff caches + unlisteners.push( + await listenToEvent('branch-git-state-changed', (payload) => { + invalidateBranchTimeline(payload.branchId); + invalidateCacheByArgs('get_diff_files', { branchId: payload.branchId }); + invalidateCacheByArgs('get_file_diff', { branchId: payload.branchId }); + }) + ); + + // Session status changed → invalidate cached session messages when a session + // completes, errors, or is cancelled (messages are now final) + unlisteners.push( + await listenToEvent('session-status-changed', (payload) => 
{ + if ( + payload.status === 'completed' || + payload.status === 'error' || + payload.status === 'cancelled' + ) { + invalidateCache('get_session_messages', { sessionId: payload.sessionId }); + } + }) + ); + + return () => { + for (const unlisten of unlisteners) unlisten(); + }; +} diff --git a/apps/staged/src/lib/listeners/pageLifecycleListener.test.ts b/apps/staged/src/lib/listeners/pageLifecycleListener.test.ts new file mode 100644 index 000000000..b6a0658ac --- /dev/null +++ b/apps/staged/src/lib/listeners/pageLifecycleListener.test.ts @@ -0,0 +1,136 @@ +// @vitest-environment jsdom +import 'fake-indexeddb/auto'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +// Mock transport — web mode +vi.mock('../transport', () => ({ + isTauri: false, + invokeCommand: vi.fn(), +})); + +// Spy on markAllStale +const mockMarkAllStale = vi.fn().mockResolvedValue(undefined); +vi.mock('../cache', () => ({ + markAllStale: (...args: unknown[]) => mockMarkAllStale(...args), +})); + +import { + listenForPageLifecycle, + _setLastActivityTimestamp, + _getLastActivityTimestamp, + _STALE_THRESHOLD_MS, +} from './pageLifecycleListener'; + +describe('pageLifecycleListener', () => { + let unlisten: () => void; + let cacheStaleEvents: Event[]; + + function onCacheStale(e: Event) { + cacheStaleEvents.push(e); + } + + beforeEach(() => { + mockMarkAllStale.mockClear(); + cacheStaleEvents = []; + window.addEventListener('cache-stale', onCacheStale); + _setLastActivityTimestamp(Date.now()); + unlisten = listenForPageLifecycle(); + }); + + afterEach(() => { + unlisten(); + window.removeEventListener('cache-stale', onCacheStale); + }); + + describe('resume event', () => { + it('marks all cache entries stale and dispatches cache-stale', async () => { + document.dispatchEvent(new Event('resume')); + + // markAllStale is async, give it a tick + await vi.waitFor(() => { + expect(mockMarkAllStale).toHaveBeenCalledTimes(1); + }); + 
expect(cacheStaleEvents).toHaveLength(1); + }); + + it('updates lastActivityTimestamp after resume', async () => { + _setLastActivityTimestamp(0); + const before = Date.now(); + document.dispatchEvent(new Event('resume')); + + await vi.waitFor(() => { + expect(mockMarkAllStale).toHaveBeenCalled(); + }); + expect(_getLastActivityTimestamp()).toBeGreaterThanOrEqual(before); + }); + }); + + describe('visibilitychange event', () => { + it('marks stale when returning after >30s gap', async () => { + // Simulate being hidden for longer than the threshold + _setLastActivityTimestamp(Date.now() - _STALE_THRESHOLD_MS - 1000); + + Object.defineProperty(document, 'visibilityState', { + value: 'visible', + writable: true, + configurable: true, + }); + document.dispatchEvent(new Event('visibilitychange')); + + await vi.waitFor(() => { + expect(mockMarkAllStale).toHaveBeenCalledTimes(1); + }); + expect(cacheStaleEvents).toHaveLength(1); + }); + + it('does NOT mark stale when returning within 30s', async () => { + // Activity was recent + _setLastActivityTimestamp(Date.now() - 1000); + + Object.defineProperty(document, 'visibilityState', { + value: 'visible', + writable: true, + configurable: true, + }); + document.dispatchEvent(new Event('visibilitychange')); + + // Give a tick for any async work + await new Promise((r) => setTimeout(r, 10)); + expect(mockMarkAllStale).not.toHaveBeenCalled(); + expect(cacheStaleEvents).toHaveLength(0); + }); + + it('records timestamp when going hidden', () => { + const before = Date.now(); + + Object.defineProperty(document, 'visibilityState', { + value: 'hidden', + writable: true, + configurable: true, + }); + document.dispatchEvent(new Event('visibilitychange')); + + expect(_getLastActivityTimestamp()).toBeGreaterThanOrEqual(before); + expect(mockMarkAllStale).not.toHaveBeenCalled(); + }); + }); + + describe('cleanup', () => { + it('removes listeners on unlisten', async () => { + unlisten(); + + _setLastActivityTimestamp(0); + 
document.dispatchEvent(new Event('resume')); + Object.defineProperty(document, 'visibilityState', { + value: 'visible', + writable: true, + configurable: true, + }); + document.dispatchEvent(new Event('visibilitychange')); + + await new Promise((r) => setTimeout(r, 10)); + expect(mockMarkAllStale).not.toHaveBeenCalled(); + expect(cacheStaleEvents).toHaveLength(0); + }); + }); +}); diff --git a/apps/staged/src/lib/listeners/pageLifecycleListener.ts b/apps/staged/src/lib/listeners/pageLifecycleListener.ts new file mode 100644 index 000000000..23d9046bf --- /dev/null +++ b/apps/staged/src/lib/listeners/pageLifecycleListener.ts @@ -0,0 +1,60 @@ +/** + * Page Lifecycle integration for cache staleness detection. + * + * Listens for `resume` and `visibilitychange` events to detect when the + * browser tab (or iOS PWA) has been frozen and restored. When a significant + * time gap is detected (>30s), all IndexedDB cache entries are marked stale + * and a `cache-stale` CustomEvent is dispatched so components can revalidate. + */ + +import { isTauri } from '../transport'; +import { markAllStale } from '../cache'; + +const STALE_THRESHOLD_MS = 30_000; + +let lastActivityTimestamp = Date.now(); + +async function handleResume() { + await markAllStale(); + window.dispatchEvent(new CustomEvent('cache-stale')); + lastActivityTimestamp = Date.now(); +} + +async function handleVisibilityChange() { + if (document.visibilityState === 'visible') { + const now = Date.now(); + if (now - lastActivityTimestamp > STALE_THRESHOLD_MS) { + await markAllStale(); + window.dispatchEvent(new CustomEvent('cache-stale')); + } + lastActivityTimestamp = now; + } else { + // Record when we went hidden so the gap is measured from this point + lastActivityTimestamp = Date.now(); + } +} + +/** + * Start listening for page lifecycle events. Returns an unlisten function. + * No-ops in Tauri mode (no page eviction). 
+ */ +export function listenForPageLifecycle(): () => void { + if (isTauri) return () => {}; + + document.addEventListener('resume', handleResume); + document.addEventListener('visibilitychange', handleVisibilityChange); + + return () => { + document.removeEventListener('resume', handleResume); + document.removeEventListener('visibilitychange', handleVisibilityChange); + }; +} + +// Exported for testing +export function _setLastActivityTimestamp(ts: number) { + lastActivityTimestamp = ts; +} +export function _getLastActivityTimestamp() { + return lastActivityTimestamp; +} +export { STALE_THRESHOLD_MS as _STALE_THRESHOLD_MS }; diff --git a/apps/staged/src/lib/listeners/sessionStatusListener.ts b/apps/staged/src/lib/listeners/sessionStatusListener.ts index 6789cd0b2..b2d4065a6 100644 --- a/apps/staged/src/lib/listeners/sessionStatusListener.ts +++ b/apps/staged/src/lib/listeners/sessionStatusListener.ts @@ -10,6 +10,7 @@ */ import { listenToEvent, type UnlistenFn } from '../transport'; +import { invalidateBranchTimeline } from '../commands'; import * as commands from '../api/commands'; import { classifyCompletedPushSession, @@ -49,6 +50,10 @@ export function listenForSessionStatus(): Promise { } if (status === 'completed' || status === 'error' || status === 'cancelled') { + // Invalidate cached timeline for the branch affected by this session + if (eventBranchId) { + invalidateBranchTimeline(eventBranchId); + } handleSessionEnd(sessionId, status); } }); @@ -91,7 +96,7 @@ async function handlePrCompletion(sessionId: string, branchId: string, status: S if (status === 'completed') { try { // Try session messages first (AI session writes PR_URL: marker). - const messages = await commands.getSessionMessages(sessionId); + const messages = await commands.getFreshSessionMessages(sessionId); let foundUrl = extractPrUrl(messages); // Also check pipeline step outputs for older or partially migrated PR sessions. 
@@ -150,7 +155,7 @@ async function handlePushCompletion(sessionId: string, branchId: string, status: try { const session = await commands.getSession(sessionId); const pipeline = session?.pipeline; - const messages = await commands.getSessionMessages(sessionId); + const messages = await commands.getFreshSessionMessages(sessionId); const outcome = classifyCompletedPushSession(pipeline, messages); if (outcome === 'rejected_non_fast_forward') { diff --git a/apps/staged/src/main.ts b/apps/staged/src/main.ts index c049299cf..bf6f9cd85 100644 --- a/apps/staged/src/main.ts +++ b/apps/staged/src/main.ts @@ -2,6 +2,16 @@ import { mount } from 'svelte'; import './app.css'; import App from './App.svelte'; +if ( + import.meta.env.PROD && + 'serviceWorker' in navigator && + ['http:', 'https:'].includes(window.location.protocol) +) { + navigator.serviceWorker + .register('/sw.js') + .catch((error) => console.warn('Service worker registration failed', error)); +} + const app = mount(App, { target: document.getElementById('app')!, }); diff --git a/apps/staged/src/service-worker.js b/apps/staged/src/service-worker.js new file mode 100644 index 000000000..6ed1a1810 --- /dev/null +++ b/apps/staged/src/service-worker.js @@ -0,0 +1,71 @@ +// @ts-nocheck +/// <reference lib="webworker" /> + +const CACHE_NAME = '__STAGED_CACHE_NAME__'; + +// Install: pre-cache the app shell entry point. +// Vite-hashed assets will be cached on first fetch via the fetch handler. +self.addEventListener('install', (event) => { + event.waitUntil(caches.open(CACHE_NAME).then((cache) => cache.addAll(['/']))); + // Activate immediately instead of waiting for old tabs to close. + self.skipWaiting(); +}); + +// Activate: clean up old caches from previous versions. +self.addEventListener('activate', (event) => { + event.waitUntil( + caches.keys().then((keys) => + Promise.all( + keys + .filter((key) => key !== CACHE_NAME) + .map((key) => caches.delete(key)) + ) + ) + ); + // Start controlling all open clients immediately.
+ self.clients.claim(); +}); + +// Fetch: network-first for navigation and API, cache-first for hashed assets. +self.addEventListener('fetch', (event) => { + const url = new URL(event.request.url); + + // Never cache API calls or WebSocket upgrades. + if (url.pathname.startsWith('/api/')) return; + + // Navigation requests (HTML pages): network-first with cache fallback. + if (event.request.mode === 'navigate') { + event.respondWith( + fetch(event.request) + .then((response) => { + const clone = response.clone(); + caches.open(CACHE_NAME).then((cache) => cache.put(event.request, clone)); + return response; + }) + .catch(() => caches.match(event.request)) + ); + return; + } + + // Static assets (JS, CSS, images): Vite hashes these filenames, so they are + // immutable and safe to serve cache-first. + if ( + url.pathname.startsWith('/assets/') || + url.pathname.endsWith('.svg') || + url.pathname.endsWith('.png') || + url.pathname.endsWith('.ico') + ) { + event.respondWith( + caches.match(event.request).then( + (cached) => + cached || + fetch(event.request).then((response) => { + const clone = response.clone(); + caches.open(CACHE_NAME).then((cache) => cache.put(event.request, clone)); + return response; + }) + ) + ); + return; + } +}); diff --git a/apps/staged/vite.config.ts b/apps/staged/vite.config.ts index 8fab243e1..579bc7905 100644 --- a/apps/staged/vite.config.ts +++ b/apps/staged/vite.config.ts @@ -1,11 +1,17 @@ -import { readFileSync } from 'node:fs'; -import { resolve } from 'node:path'; -import { defineConfig } from 'vite'; +import { createHash } from 'node:crypto'; +import { existsSync, readFileSync, readdirSync } from 'node:fs'; +import { relative, resolve } from 'node:path'; +import { defineConfig, type Plugin, type Rollup } from 'vite'; import { svelte } from '@sveltejs/vite-plugin-svelte'; const port = parseInt(process.env.VITE_PORT || '5174', 10); +const rootDir = import.meta.dirname; +const publicDir = resolve(rootDir, 'public'); +const 
serviceWorkerTemplatePath = resolve(rootDir, 'src/service-worker.js'); +const serviceWorkerCacheNamePlaceholder = '__STAGED_CACHE_NAME__'; +const serviceWorkerCacheHashLength = 12; const packageJson = JSON.parse( - readFileSync(resolve(import.meta.dirname, 'package.json'), 'utf8') + readFileSync(resolve(rootDir, 'package.json'), 'utf8') ) as { version: string }; const webCertPath = process.env.STAGED_WEB_CERT_PATH; const webKeyPath = process.env.STAGED_WEB_KEY_PATH; @@ -26,12 +32,108 @@ const webHttps = } : undefined; +type HashInput = { + contents: string | Uint8Array; + fileName: string; + kind: 'bundle' | 'public' | 'template'; +}; + +function generatedServiceWorkerPlugin(): Plugin { + return { + name: 'staged-generated-service-worker', + apply: 'build', + enforce: 'post', + generateBundle(_options, bundle) { + const template = readFileSync(serviceWorkerTemplatePath, 'utf8'); + + if (!template.includes(serviceWorkerCacheNamePlaceholder)) { + throw new Error( + `Service worker template must contain ${serviceWorkerCacheNamePlaceholder}` + ); + } + + const cacheName = `staged-${hashInputs([ + ...collectBundleInputs(bundle), + ...collectPublicAssetInputs(publicDir), + { + contents: template, + fileName: 'src/service-worker.js', + kind: 'template', + }, + ])}`; + + this.emitFile({ + fileName: 'sw.js', + source: template.replaceAll(serviceWorkerCacheNamePlaceholder, cacheName), + type: 'asset', + }); + }, + }; +} + +function collectBundleInputs(bundle: Rollup.OutputBundle): HashInput[] { + return Object.values(bundle).map((output) => ({ + contents: output.type === 'chunk' ? 
output.code : output.source, + fileName: output.fileName, + kind: 'bundle', + })); +} + +function collectPublicAssetInputs(directory: string): HashInput[] { + if (!existsSync(directory)) { + return []; + } + + return collectFiles(directory).map((filePath) => ({ + contents: readFileSync(filePath), + fileName: toPosixPath(relative(directory, filePath)), + kind: 'public', + })); +} + +function collectFiles(directory: string): string[] { + return readdirSync(directory, { withFileTypes: true }).flatMap((entry) => { + const filePath = resolve(directory, entry.name); + + if (entry.isDirectory()) { + return collectFiles(filePath); + } + + return entry.isFile() ? [filePath] : []; + }); +} + +function hashInputs(inputs: HashInput[]): string { + const hash = createHash('sha256'); + + for (const input of [...inputs].sort(compareHashInputs)) { + hash.update(input.kind); + hash.update('\0'); + hash.update(input.fileName); + hash.update('\0'); + hash.update( + typeof input.contents === 'string' ? input.contents : Buffer.from(input.contents) + ); + hash.update('\0'); + } + + return hash.digest('hex').slice(0, serviceWorkerCacheHashLength); +} + +function compareHashInputs(left: HashInput, right: HashInput): number { + return `${left.kind}:${left.fileName}`.localeCompare(`${right.kind}:${right.fileName}`); +} + +function toPosixPath(filePath: string): string { + return filePath.replaceAll('\\', '/'); +} + // https://vite.dev/config/ export default defineConfig({ define: { __APP_VERSION__: JSON.stringify(packageJson.version), }, - plugins: [svelte()], + plugins: [svelte(), generatedServiceWorkerPlugin()], server: { // Network access (0.0.0.0) is enabled via `--host` in `just dev-web`. // Default `dev` stays on localhost to avoid exposing the dev server. 
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2b92c6a41..e4f824f3d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -58,7 +58,7 @@ importers: version: 5.53.6 svelte-check: specifier: ^4.3.4 - version: 4.4.4(picomatch@4.0.3)(svelte@5.53.6)(typescript@5.9.3) + version: 4.4.4(picomatch@4.0.4)(svelte@5.53.6)(typescript@5.9.3) typescript: specifier: ~5.9.3 version: 5.9.3 @@ -198,6 +198,9 @@ importers: ansi-to-html: specifier: ^0.7.2 version: 0.7.2 + idb-keyval: + specifier: ^6.2.2 + version: 6.2.2 lucide-svelte: specifier: ^0.577.0 version: 0.577.0(svelte@5.53.6) @@ -223,6 +226,9 @@ importers: '@types/node': specifier: ^24.10.1 version: 24.11.0 + fake-indexeddb: + specifier: ^6.2.5 + version: 6.2.5 prettier: specifier: ^3.7.4 version: 3.8.1 @@ -1798,6 +1804,10 @@ packages: extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + fake-indexeddb@6.2.5: + resolution: {integrity: sha512-CGnyrvbhPlWYMngksqrSSUT1BAVP49dZocrHuK0SvtR0D5TMs5wP0o3j7jexDJW01KSadjBp1M/71o/KR3nD1w==} + engines: {node: '>=18'} + fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -1909,6 +1919,9 @@ packages: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + idb-keyval@6.2.2: + resolution: {integrity: sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg==} + ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -4357,6 +4370,8 @@ snapshots: extend@3.0.2: {} + fake-indexeddb@6.2.5: {} + fast-deep-equal@3.1.3: {} fast-json-stable-stringify@2.1.0: {} @@ -4521,6 +4536,8 @@ snapshots: dependencies: safer-buffer: 2.1.2 + idb-keyval@6.2.2: {} + ignore@5.3.2: {} imurmurhash@0.1.4: {} 
@@ -5470,18 +5487,6 @@ snapshots: stylis@4.3.6: {} - svelte-check@4.4.4(picomatch@4.0.3)(svelte@5.53.6)(typescript@5.9.3): - dependencies: - '@jridgewell/trace-mapping': 0.3.31 - chokidar: 4.0.3 - fdir: 6.5.0(picomatch@4.0.3) - picocolors: 1.1.1 - sade: 1.8.1 - svelte: 5.53.6 - typescript: 5.9.3 - transitivePeerDependencies: - - picomatch - svelte-check@4.4.4(picomatch@4.0.4)(svelte@5.53.6)(typescript@5.9.3): dependencies: '@jridgewell/trace-mapping': 0.3.31