diff --git a/apps/staged/src/lib/cache.test.ts b/apps/staged/src/lib/cache.test.ts
new file mode 100644
index 000000000..6dcb2e083
--- /dev/null
+++ b/apps/staged/src/lib/cache.test.ts
@@ -0,0 +1,456 @@
+import 'fake-indexeddb/auto';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+// Mock transport — web mode (isTauri = false) with controllable invokeCommand
+const mockInvoke = vi.fn();
+vi.mock('./transport', () => ({
+ isTauri: false,
+ invokeCommand: (...args: unknown[]) => mockInvoke(...args),
+}));
+
+import {
+ cachedInvoke,
+ cachedCommand,
+ invalidateCache,
+ invalidateCacheByArgs,
+ invalidateCacheByCommand,
+ markAllStale,
+ clearAllCache,
+ _cacheKey,
+ _CACHE_SCHEMA_VERSION,
+ _MAX_CACHE_ENTRIES,
+ _evictIfNeeded,
+} from './cache';
+
+beforeEach(async () => {
+ mockInvoke.mockReset();
+ await clearAllCache();
+});
+
+describe('cacheKey', () => {
+ it('produces deterministic keys regardless of arg order', () => {
+ expect(_cacheKey('cmd', { b: 2, a: 1 })).toBe(_cacheKey('cmd', { a: 1, b: 2 }));
+ });
+
+ it('produces different keys for different commands', () => {
+ expect(_cacheKey('foo', { a: 1 })).not.toBe(_cacheKey('bar', { a: 1 }));
+ });
+
+ it('handles undefined args', () => {
+ expect(_cacheKey('cmd')).toBe('cmd:');
+ });
+});
+
+describe('cachedInvoke', () => {
+ it('yields only network result on cache miss', async () => {
+ mockInvoke.mockResolvedValue({ items: [1, 2] });
+
+ const results = [];
+ for await (const r of cachedInvoke('list', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: { items: [1, 2] }, source: 'network' }]);
+ expect(mockInvoke).toHaveBeenCalledWith('list', undefined);
+ });
+
+ it('short-circuits with only cache when entry is fresh', async () => {
+ mockInvoke.mockResolvedValue('first');
+
+ // Prime the cache
+ const primeResults = [];
+ for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) {
+ primeResults.push(r);
+ }
+ expect(primeResults).toHaveLength(1);
+
+ // Second call within TTL should yield only cache (no network call)
+ mockInvoke.mockResolvedValue('second');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: 'first', source: 'cache' }]);
+ expect(mockInvoke).toHaveBeenCalledTimes(1); // no second network call
+ });
+
+ it('yields expired cache then revalidates from network', async () => {
+ mockInvoke.mockResolvedValue('data');
+
+ // Prime with ttl=1ms
+ for await (const _ of cachedInvoke('cmd', undefined, { ttl: 1 })) {
+ /* consume */
+ }
+
+ // Wait for expiry
+ await new Promise((r) => setTimeout(r, 5));
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', undefined, { ttl: 1 })) {
+ results.push(r);
+ }
+
+ // Expired entry is still usable — yield stale cache, then network
+ expect(results).toEqual([
+ { data: 'data', source: 'cache' },
+ { data: 'fresh', source: 'network' },
+ ]);
+ });
+
+ it('swallows network errors when usable stale cache exists', async () => {
+ mockInvoke.mockResolvedValue('cached-data');
+
+ // Prime
+ for await (const _ of cachedInvoke('cmd', undefined, { ttl: 60_000 })) {
+ /* consume */
+ }
+
+ // Mark stale so revalidation is attempted
+ await markAllStale();
+
+ // Network fails on second call
+ mockInvoke.mockRejectedValue(new Error('offline'));
+ const results = [];
+ for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ // Stale cache is still served despite network failure
+ expect(results).toEqual([{ data: 'cached-data', source: 'cache' }]);
+ });
+
+ it('throws network errors when no valid cache exists', async () => {
+ mockInvoke.mockRejectedValue(new Error('offline'));
+
+ const results = [];
+ let thrown: Error | undefined;
+ try {
+ for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+ } catch (e) {
+ thrown = e as Error;
+ }
+
+ expect(thrown?.message).toBe('offline');
+ expect(results).toEqual([]);
+ });
+});
+
+describe('cachedCommand', () => {
+ it('returns network result with no revalidation on cache miss', async () => {
+ mockInvoke.mockResolvedValue('value');
+
+ const result = await cachedCommand(
+ 'cmd', undefined, { ttl: 60_000 });
+
+ expect(result.data).toBe('value');
+ expect(result.revalidating).toBeNull();
+ });
+
+ it('returns only cached data when entry is fresh', async () => {
+ mockInvoke.mockResolvedValue('v1');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ mockInvoke.mockResolvedValue('v2');
+ const result = await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ // Fresh entry short-circuits — no network call
+ expect(result.data).toBe('v1');
+ expect(result.revalidating).toBeNull();
+ expect(mockInvoke).toHaveBeenCalledTimes(1);
+ });
+
+ it('returns stale data with a revalidating promise that resolves to fresh', async () => {
+ mockInvoke.mockResolvedValue('cached');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ await markAllStale();
+
+ mockInvoke.mockResolvedValue('fresh');
+ const result = await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ expect(result.data).toBe('cached');
+ expect(result.revalidating).not.toBeNull();
+ await expect(result.revalidating).resolves.toBe('fresh');
+ });
+
+ it('keeps revalidating promise resolving to cached data when network fails', async () => {
+ mockInvoke.mockResolvedValue('cached-data');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ await markAllStale();
+
+ mockInvoke.mockRejectedValue(new Error('offline'));
+ const result = await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ expect(result.data).toBe('cached-data');
+ await expect(result.revalidating).resolves.toBe('cached-data');
+ });
+
+ it('throws on miss when the network fails', async () => {
+ mockInvoke.mockRejectedValue(new Error('offline'));
+ await expect(cachedCommand('cmd', undefined, { ttl: 60_000 })).rejects.toThrow('offline');
+ });
+});
+
+describe('invalidateCache', () => {
+ it('removes a specific entry so next call is a miss', async () => {
+ mockInvoke.mockResolvedValue('data');
+ await cachedCommand('cmd', { id: '1' }, { ttl: 60_000 });
+
+ await invalidateCache('cmd', { id: '1' });
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ // Only network, no cache hit
+ expect(results).toEqual([{ data: 'fresh', source: 'network' }]);
+ });
+});
+
+describe('invalidateCacheByCommand', () => {
+ it('removes all entries for a command', async () => {
+ mockInvoke.mockResolvedValue('a');
+ await cachedCommand('cmd', { id: '1' }, { ttl: 60_000 });
+ mockInvoke.mockResolvedValue('b');
+ await cachedCommand('cmd', { id: '2' }, { ttl: 60_000 });
+
+ await invalidateCacheByCommand('cmd');
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', { id: '1' }, { ttl: 60_000 })) {
+ results.push(r);
+ }
+ expect(results).toEqual([{ data: 'fresh', source: 'network' }]);
+ });
+
+ it('does not affect other commands', async () => {
+ mockInvoke.mockResolvedValue('keep');
+ await cachedCommand('other', undefined, { ttl: 60_000 });
+
+ await invalidateCacheByCommand('cmd');
+
+ mockInvoke.mockResolvedValue('new');
+ const results = [];
+ for await (const r of cachedInvoke('other', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+ // Should still have cache hit (fresh, so no network call)
+ expect(results).toEqual([{ data: 'keep', source: 'cache' }]);
+ });
+});
+
+describe('invalidateCacheByArgs', () => {
+ it('removes matching entries for the same command and branchId', async () => {
+ mockInvoke.mockResolvedValue('branch-a-head');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: undefined, scope: 'branch' },
+ { ttl: 60_000 }
+ );
+ mockInvoke.mockResolvedValue('branch-a-commit');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' },
+ { ttl: 60_000 }
+ );
+
+ await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' });
+
+ mockInvoke.mockResolvedValue('fresh');
+ const headResults = [];
+ for await (const r of cachedInvoke(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: undefined, scope: 'branch' },
+ { ttl: 60_000 }
+ )) {
+ headResults.push(r);
+ }
+
+ const commitResults = [];
+ for await (const r of cachedInvoke(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' },
+ { ttl: 60_000 }
+ )) {
+ commitResults.push(r);
+ }
+
+ expect(headResults).toEqual([{ data: 'fresh', source: 'network' }]);
+ expect(commitResults).toEqual([{ data: 'fresh', source: 'network' }]);
+ });
+
+ it('keeps entries for other branches cached', async () => {
+ mockInvoke.mockResolvedValue('branch-a');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' },
+ { ttl: 60_000 }
+ );
+ mockInvoke.mockResolvedValue('branch-b');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-b', commitSha: 'def456', scope: 'commit' },
+ { ttl: 60_000 }
+ );
+
+ await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' });
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke(
+ 'get_diff_files',
+ { branchId: 'branch-b', commitSha: 'def456', scope: 'commit' },
+ { ttl: 60_000 }
+ )) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: 'branch-b', source: 'cache' }]);
+ });
+
+ it('keeps entries for other commands cached', async () => {
+ mockInvoke.mockResolvedValue('diff');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-a', commitSha: 'abc123', scope: 'commit' },
+ { ttl: 60_000 }
+ );
+ mockInvoke.mockResolvedValue('messages');
+ await cachedCommand('get_session_messages', { branchId: 'branch-a' }, { ttl: 60_000 });
+
+ await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' });
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke(
+ 'get_session_messages',
+ { branchId: 'branch-a' },
+ { ttl: 60_000 }
+ )) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: 'messages', source: 'cache' }]);
+ });
+
+ it('matches entries with optional args missing when branchId matches', async () => {
+ mockInvoke.mockResolvedValue('branch-diff');
+ await cachedCommand(
+ 'get_diff_files',
+ { branchId: 'branch-a', scope: 'branch' },
+ { ttl: 60_000 }
+ );
+
+ await invalidateCacheByArgs('get_diff_files', { branchId: 'branch-a' });
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke(
+ 'get_diff_files',
+ { branchId: 'branch-a', scope: 'branch' },
+ { ttl: 60_000 }
+ )) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: 'fresh', source: 'network' }]);
+ });
+});
+
+describe('markAllStale', () => {
+ it('yields stale cache first then revalidates from network', async () => {
+ mockInvoke.mockResolvedValue('cached');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ await markAllStale();
+
+ mockInvoke.mockResolvedValue('fresh');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([
+ { data: 'cached', source: 'cache' },
+ { data: 'fresh', source: 'network' },
+ ]);
+ });
+
+ it('cachedCommand returns stale value with a revalidating promise resolving to fresh', async () => {
+ mockInvoke.mockResolvedValue('cached');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ await markAllStale();
+
+ mockInvoke.mockResolvedValue('fresh');
+ const result = await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ expect(result.data).toBe('cached');
+ expect(result.revalidating).not.toBeNull();
+ await expect(result.revalidating).resolves.toBe('fresh');
+ });
+
+ it('revalidation clears the stale flag', async () => {
+ mockInvoke.mockResolvedValue('v1');
+ await cachedCommand('cmd', undefined, { ttl: 60_000 });
+
+ await markAllStale();
+
+ // Revalidate — await the revalidation promise so the cache write completes
+ mockInvoke.mockResolvedValue('v2');
+ const { revalidating } = await cachedCommand('cmd', undefined, { ttl: 60_000 });
+ await revalidating;
+
+ // Now the entry should be fresh — short-circuit with only cache
+ mockInvoke.mockResolvedValue('v3');
+ const results = [];
+ for await (const r of cachedInvoke('cmd', undefined, { ttl: 60_000 })) {
+ results.push(r);
+ }
+
+ expect(results).toEqual([{ data: 'v2', source: 'cache' }]);
+ expect(mockInvoke).toHaveBeenCalledTimes(2); // v1 + v2, not v3
+ });
+});
+
+describe('evictIfNeeded', () => {
+ it('evicts oldest entries when cache exceeds MAX_CACHE_ENTRIES', async () => {
+ // Fill cache beyond the limit
+ const total = _MAX_CACHE_ENTRIES + 10;
+ for (let i = 0; i < total; i++) {
+ mockInvoke.mockResolvedValue(`value-${i}`);
+ await cachedCommand('cmd', { id: String(i) }, { ttl: 60_000 });
+ }
+
+ // Explicit eviction (also triggered by cacheSet, but let's verify directly)
+ await _evictIfNeeded();
+
+ // Verify: the oldest entries should have been evicted.
+ // The first 10 entries (id 0-9) should be gone; entries 10+ should remain.
+ mockInvoke.mockResolvedValue('new');
+
+ // Entry 0 should be a cache miss (evicted)
+ const missResults = [];
+ for await (const r of cachedInvoke('cmd', { id: '0' }, { ttl: 60_000 })) {
+ missResults.push(r);
+ }
+ expect(missResults).toEqual([{ data: 'new', source: 'network' }]);
+
+ // Entry at the tail (most recent) should still be a cache hit
+ const hitResults = [];
+ for await (const r of cachedInvoke('cmd', { id: String(total - 1) }, { ttl: 60_000 })) {
+ hitResults.push(r);
+ }
+ expect(hitResults[0]).toEqual({
+ data: `value-${total - 1}`,
+ source: 'cache',
+ });
+ });
+});
diff --git a/apps/staged/src/lib/cache.ts b/apps/staged/src/lib/cache.ts
new file mode 100644
index 000000000..ec49d5025
--- /dev/null
+++ b/apps/staged/src/lib/cache.ts
@@ -0,0 +1,289 @@
+import { get, set, del, keys, entries, clear, createStore } from 'idb-keyval';
+import { invokeCommand, isTauri } from './transport';
+
+const CACHE_SCHEMA_VERSION = 1;
+const MAX_CACHE_ENTRIES = 200;
+
+/**
+ * Tracks the last invalidation time per cache key. When a read starts, it
+ * captures the current epoch for that key. If the key is invalidated while
+ * the network request is in flight, the epoch advances and the stale write
+ * is skipped — preventing pre-mutation data from repopulating the cache.
+ */
+const invalidationEpochs = new Map<string, number>();
+
+function getEpoch(key: string): number {
+ return invalidationEpochs.get(key) ?? 0;
+}
+
+function bumpEpoch(key: string): void {
+ invalidationEpochs.set(key, Date.now());
+}
+
+// Lazily-created idb-keyval store; undefined until first use.
+let cacheStore: ReturnType<typeof createStore> | undefined;
+
+function getStore() {
+ if (!cacheStore) {
+ cacheStore = createStore('staged-cache', 'responses');
+ }
+ return cacheStore;
+}
+
+interface CacheEntry {
+ key: string;
+ data: T;
+ fetchedAt: number;
+ schemaVersion: number;
+ stale?: boolean;
+}
+
+export interface CacheConfig {
+ ttl: number;
+}
+
+/**
+ * Result of a cached command call.
+ *
+ * `data` is the best value available immediately (cached if usable, otherwise
+ * from the network). When `revalidating` is non-null, a network fetch is in
+ * flight: await it to get the fresh value. Callers can render `data` instantly
+ * and then re-render once `revalidating` resolves.
+ */
+export interface SwrResult {
+ data: T;
+ revalidating: Promise | null;
+}
+
+function cacheKey(command: string, args?: Record): string {
+ const argsStr = args ? JSON.stringify(args, Object.keys(args).sort()) : '';
+ return `${command}:${argsStr}`;
+}
+
+/**
+ * Stale-while-revalidate wrapper around invokeCommand.
+ *
+ * Yields:
+ * 1. Cached data (if available and schema matches) — instant
+ * 2. Fresh network data — if cached data is stale or expired
+ *
+ * Fresh entries (within TTL and not marked stale) short-circuit with
+ * only the cache yield. If no cache exists, only the network result
+ * is yielded.
+ */
+export async function* cachedInvoke<T>(
+ command: string,
+ args: Record<string, unknown> | undefined,
+ config: CacheConfig
+): AsyncGenerator<{ data: T; source: 'cache' | 'network' }> {
+ if (isTauri) {
+ const data = await invokeCommand<T>(command, args);
+ yield { data, source: 'network' };
+ return;
+ }
+
+ const key = cacheKey(command, args);
+ const store = getStore();
+
+ const entry = await get<CacheEntry<T>>(key, store).catch(() => undefined);
+ const isUsable = entry != null && entry.schemaVersion === CACHE_SCHEMA_VERSION;
+ const isFresh = isUsable && !entry.stale && Date.now() - entry.fetchedAt < config.ttl;
+
+ if (isUsable) {
+ yield { data: entry.data, source: 'cache' };
+ }
+
+ if (isFresh) return;
+
+ const epochAtStart = getEpoch(key);
+
+ try {
+ const data = await invokeCommand<T>(command, args);
+ // Skip the cache write if the key was invalidated while we were fetching —
+ // writing would repopulate the cache with pre-mutation data.
+ if (getEpoch(key) === epochAtStart) {
+ await cacheSet(key, {
+ key,
+ data,
+ fetchedAt: Date.now(),
+ schemaVersion: CACHE_SCHEMA_VERSION,
+ } satisfies CacheEntry<T>);
+ }
+ yield { data, source: 'network' };
+ } catch (err) {
+ if (!isUsable) throw err;
+ console.warn(`[cache] Network error for ${command}, serving stale cache`, err);
+ }
+}
+
+/**
+ * Like invokeCommand, but with SWR caching.
+ *
+ * Returns `{ data, revalidating }`:
+ * - `data` is the best value available immediately (cached if usable, else network).
+ * - `revalidating` is non-null when a background network fetch is in flight;
+ * callers can await it to get the fresh value.
+ */
+export async function cachedCommand<T>(
+ command: string,
+ args: Record<string, unknown> | undefined,
+ config: CacheConfig
+): Promise<SwrResult<T>> {
+ if (isTauri) {
+ const data = await invokeCommand<T>(command, args);
+ return { data, revalidating: null };
+ }
+
+ const key = cacheKey(command, args);
+ const store = getStore();
+
+ const entry = await get<CacheEntry<T>>(key, store).catch(() => undefined);
+ const isUsable = entry != null && entry.schemaVersion === CACHE_SCHEMA_VERSION;
+ const isFresh = isUsable && !entry.stale && Date.now() - entry.fetchedAt < config.ttl;
+
+ if (isUsable && isFresh) {
+ return { data: entry.data, revalidating: null };
+ }
+
+ const epochAtStart = getEpoch(key);
+ const network = invokeCommand<T>(command, args).then(async (data) => {
+ // Skip the cache write if the key was invalidated while we were fetching —
+ // writing would repopulate the cache with pre-mutation data.
+ if (getEpoch(key) === epochAtStart) {
+ await cacheSet(key, {
+ key,
+ data,
+ fetchedAt: Date.now(),
+ schemaVersion: CACHE_SCHEMA_VERSION,
+ } satisfies CacheEntry<T>);
+ }
+ return data;
+ });
+
+ if (isUsable) {
+ // Stale/expired but usable — return cached data immediately and let the
+ // caller await revalidation. Swallow network errors so the stale entry
+ // remains the resolved value (mirrors cachedInvoke behavior).
+ const revalidating = network.catch((err) => {
+ console.warn(`[cache] Network error for ${command}, serving stale cache`, err);
+ return entry.data;
+ });
+ return { data: entry.data, revalidating };
+ }
+
+ // Miss — must await the network before we can return anything usable.
+ const data = await network;
+ return { data, revalidating: null };
+}
+
+/**
+ * Evict the oldest cache entries (by fetchedAt) until the store is under the
+ * MAX_CACHE_ENTRIES limit. Called after writes and on quota errors.
+ */
+async function evictIfNeeded(): Promise<void> {
+ try {
+ const store = getStore();
+ const allEntries = await entries<string, CacheEntry<unknown>>(store);
+ if (allEntries.length <= MAX_CACHE_ENTRIES) return;
+
+ // Sort by fetchedAt ascending (oldest first) and evict the excess
+ const sorted = allEntries.sort((a, b) => a[1].fetchedAt - b[1].fetchedAt);
+ const toEvict = sorted.slice(0, sorted.length - MAX_CACHE_ENTRIES);
+ await Promise.all(toEvict.map(([k]) => del(k, store)));
+ } catch {
+ // Best-effort eviction — don't let this block the caller
+ }
+}
+
+/**
+ * Write a cache entry, with quota-error recovery via LRU eviction.
+ */
+async function cacheSet<T>(key: string, entry: CacheEntry<T>): Promise<void> {
+ const store = getStore();
+ try {
+ await set(key, entry, store);
+ } catch (err) {
+ // On quota error, evict old entries and retry once
+ if (err instanceof DOMException && err.name === 'QuotaExceededError') {
+ await evictIfNeeded();
+ await set(key, entry, store).catch(() => {});
+ return;
+ }
+ // Swallow other write errors — cache is best-effort
+ }
+ // Proactive eviction after successful writes (fire-and-forget; never rejects)
+ void evictIfNeeded();
+}
+
+/** Invalidate a specific cache entry. */
+export async function invalidateCache(
+ command: string,
+ args?: Record
+): Promise {
+ if (isTauri) return;
+ const key = cacheKey(command, args);
+ bumpEpoch(key);
+ await del(key, getStore()).catch(() => {});
+}
+
+/** Invalidate all entries for a command (regardless of args). */
+export async function invalidateCacheByCommand(command: string): Promise<void> {
+ if (isTauri) return;
+ const store = getStore();
+ const allKeys = await keys<string>(store);
+ const matching = allKeys.filter((k) => k.startsWith(`${command}:`));
+ matching.forEach((k) => bumpEpoch(k));
+ await Promise.all(matching.map((k) => del(k, store)));
+}
+
+// Recover the args object from a cache key; undefined if the key doesn't
+// belong to `command` or its args portion isn't a JSON object.
+function parseCacheArgs(key: string, command: string): Record<string, unknown> | undefined {
+ const prefix = `${command}:`;
+ if (!key.startsWith(prefix)) return undefined;
+
+ try {
+ const parsed = JSON.parse(key.slice(prefix.length)) as unknown;
+ if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) return undefined;
+ return parsed as Record<string, unknown>;
+ } catch {
+ return undefined;
+ }
+}
+
+/** Invalidate entries for a command whose cached args include all partial args. */
+export async function invalidateCacheByArgs(
+ command: string,
+ partialArgs: Record
+): Promise {
+ if (isTauri) return;
+ const store = getStore();
+ const allKeys = await keys(store);
+ const matching = allKeys.filter((key) => {
+ const args = parseCacheArgs(key, command);
+ if (!args) return false;
+
+ return Object.entries(partialArgs).every(([argKey, argValue]) => args[argKey] === argValue);
+ });
+ matching.forEach((k) => bumpEpoch(k));
+ await Promise.all(matching.map((k) => del(k, store)));
+}
+
+/** Mark all entries as stale so SWR serves them while revalidating. */
+export async function markAllStale(): Promise<void> {
+ if (isTauri) return;
+ const store = getStore();
+ const allEntries = await entries<string, CacheEntry<unknown>>(store);
+ await Promise.all(allEntries.map(([k, entry]) => set(k, { ...entry, stale: true }, store)));
+}
+
+/** Remove all cached entries. */
+export async function clearAllCache(): Promise<void> {
+ if (isTauri) return;
+ await clear(getStore());
+}
+
+// Exported for testing
+export {
+ cacheKey as _cacheKey,
+ CACHE_SCHEMA_VERSION as _CACHE_SCHEMA_VERSION,
+ MAX_CACHE_ENTRIES as _MAX_CACHE_ENTRIES,
+ evictIfNeeded as _evictIfNeeded,
+};
diff --git a/apps/staged/src/lib/commands.test.ts b/apps/staged/src/lib/commands.test.ts
index 7789947c4..4efccb22f 100644
--- a/apps/staged/src/lib/commands.test.ts
+++ b/apps/staged/src/lib/commands.test.ts
@@ -7,6 +7,8 @@ describe('browser-native command wrappers', () => {
afterEach(() => {
vi.unstubAllGlobals();
+ vi.doUnmock('./transport');
+ vi.doUnmock('./cache');
});
it('opens URLs with browser navigation in web mode', async () => {
@@ -61,3 +63,117 @@ describe('browser-native command wrappers', () => {
expect(fetch).not.toHaveBeenCalled();
});
});
+
+describe('cached mutation command wrappers', () => {
+ function deferred() {
+ let resolve!: () => void;
+ const promise = new Promise<void>((res) => {
+ resolve = res;
+ });
+ return { promise, resolve };
+ }
+
+ let invokeCommand: ReturnType<typeof vi.fn>;
+ let cachedCommand: ReturnType<typeof vi.fn>;
+ let invalidateCache: ReturnType<typeof vi.fn>;
+ let invalidateCacheByCommand: ReturnType<typeof vi.fn>;
+
+ beforeEach(() => {
+ vi.resetModules();
+ invokeCommand = vi.fn();
+ cachedCommand = vi.fn();
+ invalidateCache = vi.fn();
+ invalidateCacheByCommand = vi.fn();
+
+ vi.doMock('./transport', () => ({
+ isTauri: false,
+ invokeCommand,
+ }));
+ vi.doMock('./cache', () => ({
+ cachedCommand,
+ cachedInvoke: vi.fn(),
+ invalidateCache,
+ invalidateCacheByCommand,
+ }));
+ });
+
+ afterEach(() => {
+ vi.doUnmock('./transport');
+ vi.doUnmock('./cache');
+ });
+
+ it('waits for repo list invalidation before resolving addProjectRepo', async () => {
+ const repo = { id: 'repo-1' };
+ const invalidated = deferred();
+ invokeCommand.mockResolvedValue(repo);
+ invalidateCache.mockReturnValue(invalidated.promise);
+
+ const { addProjectRepo } = await import('./commands');
+
+ let settled = false;
+ const result = addProjectRepo('project-1', 'block/builderbot').then((value) => {
+ settled = true;
+ return value;
+ });
+
+ await Promise.resolve();
+ await Promise.resolve();
+
+ expect(invalidateCache).toHaveBeenCalledWith('list_project_repos', { projectId: 'project-1' });
+ expect(settled).toBe(false);
+
+ invalidated.resolve();
+
+ await expect(result).resolves.toBe(repo);
+ });
+
+ it('waits for all project cache invalidations before resolving deleteProject', async () => {
+ const projectsInvalidated = deferred();
+ const branchesInvalidated = deferred();
+ const reposInvalidated = deferred();
+ invokeCommand.mockResolvedValue(undefined);
+ invalidateCacheByCommand
+ .mockReturnValueOnce(projectsInvalidated.promise)
+ .mockReturnValueOnce(branchesInvalidated.promise)
+ .mockReturnValueOnce(reposInvalidated.promise);
+
+ const { deleteProject } = await import('./commands');
+
+ let settled = false;
+ const result = deleteProject('project-1').then(() => {
+ settled = true;
+ });
+
+ await Promise.resolve();
+ await Promise.resolve();
+
+ expect(invalidateCacheByCommand.mock.calls).toEqual([
+ ['list_projects'],
+ ['list_branches_for_project'],
+ ['list_project_repos'],
+ ]);
+ expect(settled).toBe(false);
+
+ projectsInvalidated.resolve();
+ branchesInvalidated.resolve();
+ await Promise.resolve();
+ expect(settled).toBe(false);
+
+ reposInvalidated.resolve();
+
+ await expect(result).resolves.toBeUndefined();
+ });
+
+ it('bypasses the SWR cache when fetching fresh session messages', async () => {
+ const messages = [{ id: 1, sessionId: 'session-1', role: 'assistant', content: 'done' }];
+ invokeCommand.mockResolvedValue(messages);
+
+ const { getFreshSessionMessages } = await import('./commands');
+
+ await expect(getFreshSessionMessages('session-1')).resolves.toBe(messages);
+ expect(invokeCommand).toHaveBeenCalledWith('get_session_messages', {
+ sessionId: 'session-1',
+ });
+ expect(cachedCommand).not.toHaveBeenCalled();
+ });
+});
diff --git a/apps/staged/src/lib/commands.ts b/apps/staged/src/lib/commands.ts
index bcabd7ff2..9f857d801 100644
--- a/apps/staged/src/lib/commands.ts
+++ b/apps/staged/src/lib/commands.ts
@@ -5,6 +5,13 @@
*/
import { invokeCommand, isTauri } from './transport';
+import {
+ cachedCommand,
+ cachedInvoke,
+ invalidateCacheByCommand,
+ invalidateCache,
+ type SwrResult,
+} from './cache';
import type {
Project,
ProjectRepo,
@@ -41,6 +48,15 @@ export interface WorktreeChangesPreview {
conflictedPaths: string[];
}
+// =============================================================================
+// Web access
+// =============================================================================
+
+/** Returns the bearer token for web server authentication (Tauri-only). */
+export function getWebAccessToken(): Promise<string> {
+ return invokeCommand('get_web_access_token');
+}
+
// =============================================================================
// Store status
// =============================================================================
@@ -59,11 +75,11 @@ export function confirmResetStore(): Promise {
// Projects
// =============================================================================
-export function listProjects(): Promise<Project[]> {
- return invokeCommand('list_projects');
+export function listProjects(): Promise<SwrResult<Project[]>> {
+ return cachedCommand<Project[]>('list_projects', undefined, { ttl: 5 * 60_000 });
}
-export function createProject(
+export async function createProject(
name: string,
location: 'local' | 'remote',
githubRepo?: string,
@@ -73,7 +89,7 @@ export function createProject(
defaultBranch?: string,
headRepo?: string
): Promise<Project> {
- return invokeCommand('create_project', {
+ const project = await invokeCommand('create_project', {
name,
location,
githubRepo: githubRepo ?? null,
@@ -83,21 +99,28 @@ export function createProject(
defaultBranch: defaultBranch ?? null,
headRepo: headRepo ?? null,
});
+ await invalidateCacheByCommand('list_projects');
+ return project;
}
-export function deleteProject(id: string): Promise {
- return invokeCommand('delete_project', { id });
+export async function deleteProject(id: string): Promise {
+ await invokeCommand('delete_project', { id });
+ await Promise.all([
+ invalidateCacheByCommand('list_projects'),
+ invalidateCacheByCommand('list_branches_for_project'),
+ invalidateCacheByCommand('list_project_repos'),
+ ]);
}
-export function listProjectRepos(projectId: string): Promise<ProjectRepo[]> {
- return invokeCommand('list_project_repos', { projectId });
+export function listProjectRepos(projectId: string): Promise<SwrResult<ProjectRepo[]>> {
+ return cachedCommand<ProjectRepo[]>('list_project_repos', { projectId }, { ttl: 10 * 60_000 });
}
export function listRecentRepos(limit?: number): Promise {
return invokeCommand('list_recent_repos', { limit: limit ?? 10 });
}
-export function addProjectRepo(
+export async function addProjectRepo(
projectId: string,
githubRepo: string,
branchName?: string,
@@ -107,7 +130,7 @@ export function addProjectRepo(
defaultBranch?: string,
headRepo?: string
): Promise<ProjectRepo> {
- return invokeCommand('add_project_repo', {
+ const repo = await invokeCommand('add_project_repo', {
projectId,
githubRepo,
branchName: branchName ?? null,
@@ -117,6 +140,8 @@ export function addProjectRepo(
defaultBranch: defaultBranch ?? null,
headRepo: headRepo ?? null,
});
+ await invalidateCache('list_project_repos', { projectId });
+ return repo;
}
export function updateProjectRepoBranchName(
@@ -127,12 +152,17 @@ export function updateProjectRepoBranchName(
return invokeCommand('update_project_repo_branch_name', { projectId, projectRepoId, branchName });
}
-export function removeProjectRepo(projectId: string, projectRepoId: string): Promise<void> {
- return invokeCommand('remove_project_repo', { projectId, projectRepoId });
+export async function removeProjectRepo(projectId: string, projectRepoId: string): Promise<void> {
+ await invokeCommand('remove_project_repo', { projectId, projectRepoId });
+ await invalidateCache('list_project_repos', { projectId });
}
-export function setPrimaryProjectRepo(projectId: string, projectRepoId: string): Promise<void> {
- return invokeCommand('set_primary_project_repo', { projectId, projectRepoId });
+export async function setPrimaryProjectRepo(
+ projectId: string,
+ projectRepoId: string
+): Promise<void> {
+ await invokeCommand('set_primary_project_repo', { projectId, projectRepoId });
+ await invalidateCache('list_project_repos', { projectId });
}
export function clearProjectRepoReason(projectRepoId: string): Promise {
@@ -230,8 +260,8 @@ export function startProjectSession(
// Branches
// =============================================================================
-export function listBranchesForProject(projectId: string): Promise<Branch[]> {
- return invokeCommand('list_branches_for_project', { projectId });
+export function listBranchesForProject(projectId: string): Promise<SwrResult<Branch[]>> {
+ return cachedCommand<Branch[]>('list_branches_for_project', { projectId }, { ttl: 2 * 60_000 });
}
/** Get a single branch by ID. */
@@ -242,13 +272,20 @@ export function getBranch(branchId: string): Promise {
/** Create a local branch record (DB only — no git worktree yet).
* Returns immediately with worktreePath = null.
* Call `setupWorktree` separately to create the git worktree. */
-export function createBranch(
+export async function createBranch(
projectId: string,
branchName: string,
baseBranch?: string,
projectRepoId?: string
): Promise {
- return invokeCommand('create_branch', { projectId, branchName, baseBranch, projectRepoId });
+ const branch = await invokeCommand('create_branch', {
+ projectId,
+ branchName,
+ baseBranch,
+ projectRepoId,
+ });
+ await invalidateCacheByCommand('list_branches_for_project');
+ return branch;
}
/** Create the git worktree for a local branch and record its workdir.
@@ -309,8 +346,12 @@ export function resumeWorkspace(workspaceName: string): Promise {
return invokeCommand('resume_workspace', { workspaceName });
}
-export function deleteBranch(branchId: string): Promise {
- return invokeCommand('delete_branch', { branchId });
+export async function deleteBranch(branchId: string): Promise {
+ await invokeCommand('delete_branch', { branchId });
+ await Promise.all([
+ invalidateCacheByCommand('list_branches_for_project'),
+ invalidateCache('get_branch_timeline', { branchId }),
+ ]);
}
export function renameBranch(branchId: string, branchName: string): Promise {
@@ -344,12 +385,14 @@ export function pollAllWorkspaceStatuses(
// =============================================================================
const TIMELINE_FRESH_MS = 10_000;
+const TIMELINE_CACHE_TTL = 30_000;
const timelineCache = new Map();
const inFlightTimelines = new Map>();
export function invalidateBranchTimeline(branchId: string): void {
timelineCache.delete(branchId);
inFlightTimelines.delete(branchId);
+ invalidateCache('get_branch_timeline', { branchId });
window.dispatchEvent(
new CustomEvent('timeline-invalidated', { detail: { branchIds: [branchId] } })
);
@@ -375,18 +418,30 @@ export function getBranchTimeline(
}
}
- const request = invokeCommand('get_branch_timeline', { branchId })
- .then((timeline) => {
- if (inFlightTimelines.get(branchId) === request) {
+ // Use cachedInvoke so IndexedDB serves data on cold start while the network
+ // fetch runs in parallel (SWR). The first yield may be cached; the last is
+ // always the freshest available value.
+ let request: Promise | undefined;
+ const timelineRequest = (async () => {
+ let timeline: BranchTimeline | undefined;
+ for await (const { data } of cachedInvoke(
+ 'get_branch_timeline',
+ { branchId },
+ { ttl: TIMELINE_CACHE_TTL }
+ )) {
+ timeline = data;
+ if (request && inFlightTimelines.get(branchId) === request) {
timelineCache.set(branchId, { timeline, fetchedAt: Date.now() });
}
- return timeline;
- })
- .finally(() => {
- if (inFlightTimelines.get(branchId) === request) {
- inFlightTimelines.delete(branchId);
- }
- });
+ }
+ return timeline!;
+ })();
+
+ request = timelineRequest.finally(() => {
+ if (request && inFlightTimelines.get(branchId) === request) {
+ inFlightTimelines.delete(branchId);
+ }
+ });
inFlightTimelines.set(branchId, request);
return request;
@@ -412,6 +467,7 @@ export function invalidateProjectBranchTimelines(branchIds: string[]): void {
for (const id of branchIds) {
timelineCache.delete(id);
inFlightTimelines.delete(id);
+ invalidateCache('get_branch_timeline', { branchId: id });
}
window.dispatchEvent(new CustomEvent('timeline-invalidated', { detail: { branchIds } }));
}
@@ -561,8 +617,8 @@ export interface AcpProviderInfo {
}
/** Scan the system for installed ACP-compatible agents. */
-export function discoverAcpProviders(): Promise {
- return invokeCommand('discover_acp_providers');
+export function discoverAcpProviders(): Promise> {
+ return cachedCommand('discover_acp_providers', undefined, { ttl: 30 * 60_000 });
}
// =============================================================================
@@ -573,7 +629,12 @@ export function getSession(sessionId: string): Promise {
return invokeCommand('get_session', { sessionId });
}
-export function getSessionMessages(sessionId: string): Promise {
+export function getSessionMessages(sessionId: string): Promise> {
+ return cachedCommand('get_session_messages', { sessionId }, { ttl: 5 * 60_000 });
+}
+
+/** Fetch session messages without SWR cache, for terminal status handlers. */
+export function getFreshSessionMessages(sessionId: string): Promise {
return invokeCommand('get_session_messages', { sessionId });
}
@@ -732,8 +793,8 @@ export function getDiffFiles(
branchId: string,
commitSha?: string,
scope: DiffScope = 'branch'
-): Promise {
- return invokeCommand('get_diff_files', { branchId, commitSha, scope });
+): Promise> {
+ return cachedCommand('get_diff_files', { branchId, commitSha, scope }, { ttl: 2 * 60_000 });
}
/** Get the full diff content for a single file. */
@@ -742,8 +803,8 @@ export function getFileDiff(
commitSha: string,
scope: DiffScope,
path: string
-): Promise {
- return invokeCommand('get_file_diff', { branchId, commitSha, scope, path });
+): Promise> {
+ return cachedCommand('get_file_diff', { branchId, commitSha, scope, path }, { ttl: 2 * 60_000 });
}
/** Get file content at a specific ref (for reference files). */
diff --git a/apps/staged/src/lib/features/agents/agent.svelte.ts b/apps/staged/src/lib/features/agents/agent.svelte.ts
index 07f8a8e04..27ce96a37 100644
--- a/apps/staged/src/lib/features/agents/agent.svelte.ts
+++ b/apps/staged/src/lib/features/agents/agent.svelte.ts
@@ -82,9 +82,16 @@ export const agentState = $state({
*/
export async function refreshProviders(): Promise {
try {
- const providers = await discoverAcpProviders();
+ const { data: providers, revalidating } = await discoverAcpProviders();
agentState.providers = providers;
agentState.loaded = true;
+ if (revalidating) {
+ revalidating
+ .then((fresh) => {
+ agentState.providers = fresh;
+ })
+ .catch((e) => console.error('Failed to revalidate ACP providers:', e));
+ }
return providers;
} catch (e) {
console.error('Failed to discover ACP providers:', e);
diff --git a/apps/staged/src/lib/features/branches/BranchCard.svelte b/apps/staged/src/lib/features/branches/BranchCard.svelte
index 7de3ebff9..8833cfbfb 100644
--- a/apps/staged/src/lib/features/branches/BranchCard.svelte
+++ b/apps/staged/src/lib/features/branches/BranchCard.svelte
@@ -730,6 +730,17 @@
return () => window.removeEventListener('project-notes-invalidated', handler);
});
+ // Re-fetch timeline when page resumes from a freeze (cache-stale event)
+ $effect(() => {
+ const handler = () => {
+ if (branchTimelineReadyKey(branch)) {
+ void loadTimeline();
+ }
+ };
+ window.addEventListener('cache-stale', handler);
+ return () => window.removeEventListener('cache-stale', handler);
+ });
+
async function loadTimeline({
timelineKey = branchTimelineReadyKey(branch),
force = false,
diff --git a/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte b/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte
index fc4c3c72f..5756225aa 100644
--- a/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte
+++ b/apps/staged/src/lib/features/branches/BranchCardPrButton.svelte
@@ -434,7 +434,7 @@
try {
if (status === 'completed' && sid) {
- const messages = await commands.getSessionMessages(sid);
+ const messages = await commands.getFreshSessionMessages(sid);
const foundUrl = extractPrUrl(messages);
if (foundUrl) {
@@ -507,7 +507,7 @@
}
try {
- const messages = await commands.getSessionMessages(sid);
+ const messages = await commands.getFreshSessionMessages(sid);
const pipelineOutcome = classifyPipelinePushCompletion(pipeline, messages);
if (pipelineOutcome) return pipelineOutcome;
return classifyCompletedPushSession(pipeline, messages);
diff --git a/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts b/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts
index 16df8640f..11b503419 100644
--- a/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts
+++ b/apps/staged/src/lib/features/diff/diffViewerState.svelte.ts
@@ -43,19 +43,26 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit
state.error = null;
try {
- const response = await commands.getDiffFiles(
+ const { data: response, revalidating } = await commands.getDiffFiles(
state.branchId,
state.commitSha ?? undefined,
state.scope
);
if (generation !== contextGeneration) return;
- state.commitSha = response.commitSha;
- state.files = response.files;
-
+ applyDiffFilesResponse(response);
if (state.files.length > 0) {
await selectFile(sharedFileSummaryPath(state.files[0]));
}
+ if (generation === contextGeneration) {
+ state.loading = false;
+ }
+
+ if (revalidating) {
+ const fresh = await revalidating;
+ if (generation !== contextGeneration) return;
+ applyDiffFilesResponse(fresh);
+ }
} catch (e) {
if (generation !== contextGeneration) return;
state.error = e instanceof Error ? e.message : String(e);
@@ -67,6 +74,11 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit
}
}
+ function applyDiffFilesResponse(response: { commitSha: string; files: FileDiffSummary[] }) {
+ state.commitSha = response.commitSha;
+ state.files = response.files;
+ }
+
async function selectFile(path: string | null): Promise {
const thisGeneration = ++selectionGeneration;
state.selectedFile = path;
@@ -84,12 +96,29 @@ export function createDiffViewerState(branchId: string, scope: DiffScope, commit
if (cached) return cached;
state.loadingFile = path;
+ const commitSha = state.commitSha;
try {
- const diff = await commands.getFileDiff(state.branchId, state.commitSha, state.scope, path);
+ const { data: diff, revalidating } = await commands.getFileDiff(
+ state.branchId,
+ commitSha,
+ state.scope,
+ path
+ );
const newCache = new Map(state.diffCache);
newCache.set(path, diff);
state.diffCache = newCache;
+
+ if (revalidating) {
+ revalidating
+ .then((fresh) => {
+ if (state.commitSha !== commitSha) return;
+ const next = new Map(state.diffCache);
+ next.set(path, fresh);
+ state.diffCache = next;
+ })
+ .catch(() => {});
+ }
return diff;
} catch (e) {
console.error(`Failed to load diff for ${path}:`, e);
diff --git a/apps/staged/src/lib/features/layout/WebLogin.svelte b/apps/staged/src/lib/features/layout/WebLogin.svelte
new file mode 100644
index 000000000..ea10a83e0
--- /dev/null
+++ b/apps/staged/src/lib/features/layout/WebLogin.svelte
@@ -0,0 +1,137 @@
+
+
+
+
+
+
Staged
+
Enter the access token from the desktop app to connect.
+
+
+
+ {#if error}
+
{error}
+ {/if}
+
+
+
+
diff --git a/apps/staged/src/lib/features/layout/navigation.svelte.ts b/apps/staged/src/lib/features/layout/navigation.svelte.ts
index d04b41981..18971838b 100644
--- a/apps/staged/src/lib/features/layout/navigation.svelte.ts
+++ b/apps/staged/src/lib/features/layout/navigation.svelte.ts
@@ -51,7 +51,7 @@ export async function initNavigation(): Promise {
// Validate the project still exists before navigating to it
try {
- const projects = await commands.listProjects();
+ const { data: projects } = await commands.listProjects();
projectsList.current = projects;
const existingIds = new Set(projects.map((p) => p.id));
if (existingIds.has(lastProjectId)) {
diff --git a/apps/staged/src/lib/features/projects/ProjectHome.svelte b/apps/staged/src/lib/features/projects/ProjectHome.svelte
index 0dcb10f05..0cf798529 100644
--- a/apps/staged/src/lib/features/projects/ProjectHome.svelte
+++ b/apps/staged/src/lib/features/projects/ProjectHome.svelte
@@ -102,7 +102,9 @@
void projectRunActionsStore.startListening();
const onNewProject = () => handleNewProject();
+ const onCacheStale = () => loadData();
window.addEventListener('staged:new-project', onNewProject);
+ window.addEventListener('cache-stale', onCacheStale);
let unlistenDetection: (() => void) | undefined;
listenToRepoActionsDetection((event) => {
@@ -136,18 +138,18 @@
commands.listBranchesForProject(projectId),
commands.listProjectRepos(projectId),
]);
- setProjects(projectsList);
- projects = projectsList;
+ setProjects(projectsList.data);
+ projects = projectsList.data;
const mergedBranches = mergeBranchesPreservingWorktree(
branchesByProject.get(projectId) || [],
- branches
+ branches.data
);
branchesByProject = new Map(branchesByProject).set(projectId, mergedBranches);
commands.invalidateProjectBranchTimelines(mergedBranches.map((b) => b.id));
workspaceLifecycle.enqueueInitialSetup(projectId, mergedBranches);
- replaceProjectRepos(projectId, repos);
+ replaceProjectRepos(projectId, repos.data);
void repoBadgeStore.ensureForRepos(
- repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
+ repos.data.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
);
} catch (e) {
console.error('[ProjectHome] Failed to refresh project after setup progress:', e);
@@ -185,6 +187,7 @@
return () => {
window.removeEventListener('staged:new-project', onNewProject);
+ window.removeEventListener('cache-stale', onCacheStale);
unlistenDetection?.();
unlistenProjectRepoAdded?.();
unlistenPrStatus?.();
@@ -234,89 +237,135 @@
error = null;
await repoBadgeStore.loadAll();
try {
- const projectList = await commands.listProjects();
+ const { data: initialProjectList, revalidating: projectsRevalidating } =
+ await commands.listProjects();
if (generation !== loadGeneration) return;
- projects = projectList;
- setProjects(projectList);
+ await applyProjectList(initialProjectList, generation);
loading = false;
- // Seed maps so project sections can render immediately.
- const branchMap = new Map();
- for (const project of projectList) {
- branchMap.set(project.id, branchesByProject.get(project.id) || []);
+ if (projectsRevalidating) {
+ try {
+ const fresh = await projectsRevalidating;
+ if (generation !== loadGeneration) return;
+ await applyProjectList(fresh, generation);
+ } catch (e) {
+ console.error('[ProjectHome] Failed to revalidate project list:', e);
+ }
}
- branchesByProject = branchMap;
-
- // Drop cached repos for projects that no longer exist.
- const projectIds = new Set(projectList.map((p) => p.id));
- const prunedRepos = new Map();
- for (const [id, repo] of reposById) {
- if (projectIds.has(repo.projectId)) prunedRepos.set(id, repo);
+ } catch (e) {
+ if (generation !== loadGeneration) return;
+ error = e instanceof Error ? e.message : String(e);
+ } finally {
+ if (generation === loadGeneration) {
+ loading = false;
}
- reposById = prunedRepos;
-
- await Promise.all(
- projectList.map(async (project) => {
- try {
- const [branches, repos] = await Promise.all([
- commands.listBranchesForProject(project.id),
- commands.listProjectRepos(project.id),
- ]);
- if (generation !== loadGeneration) return;
- branchesByProject = new Map(branchesByProject).set(project.id, branches);
- workspaceLifecycle.enqueueInitialSetup(project.id, branches);
- replaceProjectRepos(project.id, repos);
-
- // On startup, drain queued sessions for branches that are already ready.
- for (const branch of branches) {
- const isLocalReady = branch.branchType === 'local' && branch.worktreePath;
- const isRemoteReady =
- branch.branchType === 'remote' && branch.workspaceStatus === 'running';
- if (isLocalReady || isRemoteReady) {
- commands.drainQueuedSessions(branch.id).catch((e) => {
- console.error('[ProjectHome] Failed to drain queued sessions on startup:', e);
- });
- }
- }
- } catch (e) {
- console.error(`[ProjectHome] Failed to hydrate project '${project.id}':`, e);
+ }
+ }
+
+ /**
+ * Apply a list of projects fetched from the backend: seed branch/repo maps,
+ * hydrate per-project branches and repos (with SWR revalidation), and
+ * refresh action-detection state. Called once with the cached value and
+ * again if the network revalidation yields fresh data.
+ */
+ async function applyProjectList(projectList: Project[], generation: number) {
+ projects = projectList;
+ setProjects(projectList);
+
+ // Seed maps so project sections can render immediately.
+ const branchMap = new Map();
+ for (const project of projectList) {
+ branchMap.set(project.id, branchesByProject.get(project.id) || []);
+ }
+ branchesByProject = branchMap;
+
+ // Drop cached repos for projects that no longer exist.
+ const projectIds = new Set(projectList.map((p) => p.id));
+ const prunedRepos = new Map();
+ for (const [id, repo] of reposById) {
+ if (projectIds.has(repo.projectId)) prunedRepos.set(id, repo);
+ }
+ reposById = prunedRepos;
+
+ await Promise.all(
+ projectList.map(async (project) => {
+ try {
+ const [branchesResult, reposResult] = await Promise.all([
+ commands.listBranchesForProject(project.id),
+ commands.listProjectRepos(project.id),
+ ]);
+ if (generation !== loadGeneration) return;
+ applyProjectBranches(project.id, branchesResult.data, generation);
+ replaceProjectRepos(project.id, reposResult.data);
+
+ if (branchesResult.revalidating) {
+ branchesResult.revalidating
+ .then((fresh) => applyProjectBranches(project.id, fresh, generation))
+ .catch((e) => {
+ console.error(
+ `[ProjectHome] Failed to revalidate branches for '${project.id}':`,
+ e
+ );
+ });
}
- })
- );
+ if (reposResult.revalidating) {
+ reposResult.revalidating
+ .then((fresh) => {
+ if (generation !== loadGeneration) return;
+ replaceProjectRepos(project.id, fresh);
+ })
+ .catch((e) => {
+ console.error(`[ProjectHome] Failed to revalidate repos for '${project.id}':`, e);
+ });
+ }
+ } catch (e) {
+ console.error(`[ProjectHome] Failed to hydrate project '${project.id}':`, e);
+ }
+ })
+ );
- projectRunActionsStore.hydrateFromProjectBranches(branchesByProject).catch(console.error);
+ projectRunActionsStore.hydrateFromProjectBranches(branchesByProject).catch(console.error);
- // Ensure badges exist for all loaded repos
- const allRepos = [...reposById.values()].map((r) => ({
- githubRepo: r.githubRepo,
- subpath: r.subpath,
- }));
- void repoBadgeStore.ensureForRepos(allRepos);
+ // Ensure badges exist for all loaded repos
+ const allRepos = [...reposById.values()].map((r) => ({
+ githubRepo: r.githubRepo,
+ subpath: r.subpath,
+ }));
+ void repoBadgeStore.ensureForRepos(allRepos);
- try {
- const contexts = await commands.listActionContexts();
- if (generation !== loadGeneration) return;
- detectingProjectIds = new Set(
- projectList
- .filter((project) =>
- contexts.some(
- (context) =>
- context.detectingActions &&
- context.githubRepo === project.githubRepo &&
- context.subpath === project.subpath
- )
+ try {
+ const contexts = await commands.listActionContexts();
+ if (generation !== loadGeneration) return;
+ detectingProjectIds = new Set(
+ projectList
+ .filter((project) =>
+ contexts.some(
+ (context) =>
+ context.detectingActions &&
+ context.githubRepo === project.githubRepo &&
+ context.subpath === project.subpath
)
- .map((project) => project.id)
- );
- } catch (e) {
- console.error('[ProjectHome] Failed to load action contexts:', e);
- }
+ )
+ .map((project) => project.id)
+ );
} catch (e) {
- if (generation !== loadGeneration) return;
- error = e instanceof Error ? e.message : String(e);
- } finally {
- if (generation === loadGeneration) {
- loading = false;
+ console.error('[ProjectHome] Failed to load action contexts:', e);
+ }
+ }
+
+ function applyProjectBranches(projectId: string, branches: Branch[], generation: number) {
+ if (generation !== loadGeneration) return;
+ branchesByProject = new Map(branchesByProject).set(projectId, branches);
+ workspaceLifecycle.enqueueInitialSetup(projectId, branches);
+
+ // On startup, drain queued sessions for branches that are already ready.
+ for (const branch of branches) {
+ const isLocalReady = branch.branchType === 'local' && branch.worktreePath;
+ const isRemoteReady = branch.branchType === 'remote' && branch.workspaceStatus === 'running';
+ if (isLocalReady || isRemoteReady) {
+ commands.drainQueuedSessions(branch.id).catch((e) => {
+ console.error('[ProjectHome] Failed to drain queued sessions on startup:', e);
+ });
}
}
}
@@ -420,9 +469,9 @@
commands.listBranchesForProject(project.id),
commands.listProjectRepos(project.id),
]);
- branchesByProject = new Map(branchesByProject).set(project.id, branches);
- workspaceLifecycle.enqueueInitialSetup(project.id, branches);
- replaceProjectRepos(project.id, repos);
+ branchesByProject = new Map(branchesByProject).set(project.id, branches.data);
+ workspaceLifecycle.enqueueInitialSetup(project.id, branches.data);
+ replaceProjectRepos(project.id, repos.data);
} catch (e) {
console.error('[ProjectHome] Failed to hydrate newly created project:', e);
}
@@ -534,18 +583,18 @@
commands.listBranchesForProject(projectId),
commands.listProjectRepos(projectId),
]);
- setProjects(projectsList);
- projects = projectsList;
+ setProjects(projectsList.data);
+ projects = projectsList.data;
const mergedBranches = mergeBranchesPreservingWorktree(
branchesByProject.get(projectId) || [],
- branches
+ branches.data
);
branchesByProject = new Map(branchesByProject).set(projectId, mergedBranches);
commands.invalidateProjectBranchTimelines(mergedBranches.map((b) => b.id));
workspaceLifecycle.enqueueInitialSetup(projectId, mergedBranches);
- replaceProjectRepos(projectId, repos);
+ replaceProjectRepos(projectId, repos.data);
void repoBadgeStore.ensureForRepos(
- repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
+ repos.data.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
);
} catch (e) {
console.error('Failed to add repo:', e);
@@ -612,10 +661,10 @@
commands.listBranchesForProject(branch.projectId),
commands.listProjectRepos(branch.projectId),
]);
- setProjects(projectsList);
- projects = projectsList;
- branchesByProject = new Map(branchesByProject).set(branch.projectId, branches);
- replaceProjectRepos(branch.projectId, repos);
+ setProjects(projectsList.data);
+ projects = projectsList.data;
+ branchesByProject = new Map(branchesByProject).set(branch.projectId, branches.data);
+ replaceProjectRepos(branch.projectId, repos.data);
} else {
await commands.deleteBranch(branch.id);
// Fallback for legacy branches without repo linkage
diff --git a/apps/staged/src/lib/features/projects/ProjectsList.svelte b/apps/staged/src/lib/features/projects/ProjectsList.svelte
index 983e06996..6e2ba56a7 100644
--- a/apps/staged/src/lib/features/projects/ProjectsList.svelte
+++ b/apps/staged/src/lib/features/projects/ProjectsList.svelte
@@ -289,9 +289,11 @@
deletingProjectNames = next;
loadProjects();
};
+ const onCacheStale = () => loadProjects();
window.addEventListener('staged:new-project', onNewProject);
window.addEventListener('staged:project-delete-start', onProjectDeleteStart);
window.addEventListener('staged:project-delete-end', onProjectDeleteEnd);
+ window.addEventListener('cache-stale', onCacheStale);
// Listen for PR status changes to update branch state
let unlistenPrStatus: UnlistenFn | undefined;
@@ -325,6 +327,7 @@
window.removeEventListener('staged:new-project', onNewProject);
window.removeEventListener('staged:project-delete-start', onProjectDeleteStart);
window.removeEventListener('staged:project-delete-end', onProjectDeleteEnd);
+ window.removeEventListener('cache-stale', onCacheStale);
unlistenPrStatus?.();
};
});
@@ -334,26 +337,15 @@
error = null;
try {
await repoBadgeStore.loadAll();
- const loadedProjects = await commands.listProjects();
- projects = loadedProjects;
- setProjects(loadedProjects);
- void hydrateRepos(loadedProjects);
- // Load branches for each project to calculate PR status
- const branchesMap = new Map();
- await Promise.all(
- loadedProjects.map(async (project) => {
- try {
- const branches = await commands.listBranchesForProject(project.id);
- branchesMap.set(project.id, branches);
- } catch (e) {
- console.error(`Failed to load branches for project ${project.id}:`, e);
- branchesMap.set(project.id, []);
- }
- })
- );
- projectBranches = branchesMap;
+ const { data: initialProjects, revalidating: projectsRevalidating } =
+ await commands.listProjects();
+ await applyProjects(initialProjects);
+ loading = false;
- projectRunActionsStore.hydrateFromProjectBranches(branchesMap).catch(console.error);
+ if (projectsRevalidating) {
+ const fresh = await projectsRevalidating;
+ await applyProjects(fresh);
+ }
} catch (e) {
error = e instanceof Error ? e.message : String(e);
} finally {
@@ -361,14 +353,60 @@
}
}
+ async function applyProjects(loadedProjects: Project[]) {
+ projects = loadedProjects;
+ setProjects(loadedProjects);
+ void hydrateRepos(loadedProjects);
+
+ const branchesMap = new Map();
+ const branchRevalidations: Array<{ projectId: string; promise: Promise }> = [];
+ await Promise.all(
+ loadedProjects.map(async (project) => {
+ try {
+ const { data: branches, revalidating } = await commands.listBranchesForProject(
+ project.id
+ );
+ branchesMap.set(project.id, branches);
+ if (revalidating) {
+ branchRevalidations.push({ projectId: project.id, promise: revalidating });
+ }
+ } catch (e) {
+ console.error(`Failed to load branches for project ${project.id}:`, e);
+ branchesMap.set(project.id, []);
+ }
+ })
+ );
+ projectBranches = branchesMap;
+ projectRunActionsStore.hydrateFromProjectBranches(branchesMap).catch(console.error);
+
+ if (branchRevalidations.length > 0) {
+ void Promise.all(
+ branchRevalidations.map(async ({ projectId, promise }) => {
+ try {
+ const fresh = await promise;
+ projectBranches = new Map(projectBranches).set(projectId, fresh);
+ } catch (e) {
+ console.error(`Failed to revalidate branches for project ${projectId}:`, e);
+ }
+ })
+ ).then(() =>
+ projectRunActionsStore.hydrateFromProjectBranches(projectBranches).catch(console.error)
+ );
+ }
+ }
+
async function hydrateRepos(projectList: Project[]) {
const generation = ++repoLoadGeneration;
reposHydrating = true;
try {
+ const revalidations: Array<{ projectId: string; promise: Promise }> = [];
const entries = await Promise.all(
projectList.map(async (project) => {
try {
- const repos = await commands.listProjectRepos(project.id);
+ const { data: repos, revalidating } = await commands.listProjectRepos(project.id);
+ if (revalidating) {
+ revalidations.push({ projectId: project.id, promise: revalidating });
+ }
return [project.id, repos] as const;
} catch (e) {
console.error(`[ProjectsList] Failed to load repos for project '${project.id}':`, e);
@@ -384,6 +422,20 @@
repos.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
);
void repoBadgeStore.ensureForRepos(allRepos);
+
+ for (const { projectId, promise } of revalidations) {
+ void promise
+ .then((fresh) => {
+ if (generation !== repoLoadGeneration) return;
+ reposByProject = new Map(reposByProject).set(projectId, fresh);
+ void repoBadgeStore.ensureForRepos(
+ fresh.map((r) => ({ githubRepo: r.githubRepo, subpath: r.subpath }))
+ );
+ })
+ .catch((e) => {
+ console.error(`[ProjectsList] Failed to revalidate repos for '${projectId}':`, e);
+ });
+ }
} finally {
if (generation === repoLoadGeneration) {
reposHydrating = false;
diff --git a/apps/staged/src/lib/features/sessions/SessionModal.svelte b/apps/staged/src/lib/features/sessions/SessionModal.svelte
index e91f08323..2c56a5412 100644
--- a/apps/staged/src/lib/features/sessions/SessionModal.svelte
+++ b/apps/staged/src/lib/features/sessions/SessionModal.svelte
@@ -400,14 +400,26 @@
loading = true;
error = null;
try {
- const [s, msgs] = await Promise.all([getSession(sessionId), getSessionMessages(sessionId)]);
+ const [s, msgsResult] = await Promise.all([
+ getSession(sessionId),
+ getSessionMessages(sessionId),
+ ]);
if (closed) return;
if (!s) {
error = 'Session not found';
return;
}
session = s;
- messages = msgs;
+ messages = msgsResult.data;
+ if (msgsResult.revalidating) {
+ msgsResult.revalidating
+ .then((fresh) => {
+ if (closed) return;
+ messages = fresh;
+ scrollToBottomIfNear(true);
+ })
+ .catch(() => {});
+ }
} catch (e) {
error = e instanceof Error ? e.message : String(e);
} finally {
@@ -433,7 +445,7 @@
// Incremental message fetch
if (messages.length === 0) {
- const msgs = await getSessionMessages(sessionId);
+ const { data: msgs } = await getSessionMessages(sessionId);
if (closed) return;
if (msgs.length > 0) {
messages = msgs;
diff --git a/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte b/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte
index ac7a3f232..e02c4c246 100644
--- a/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte
+++ b/apps/staged/src/lib/features/settings/ActionsSettingsPanel.svelte
@@ -199,10 +199,10 @@
const contextIdByRepo = new Map(
actionContexts.map((context) => [repoKey(context.githubRepo, context.subpath), context.id])
);
- const projects = await commands.listProjects();
+ const { data: projects } = await commands.listProjects();
const reposByProject = await Promise.all(
projects.map(async (project) => {
- const repos = await commands.listProjectRepos(project.id);
+ const { data: repos } = await commands.listProjectRepos(project.id);
return { project, repos };
})
);
diff --git a/apps/staged/src/lib/features/settings/SettingsPage.svelte b/apps/staged/src/lib/features/settings/SettingsPage.svelte
index 321a7f850..4262db755 100644
--- a/apps/staged/src/lib/features/settings/SettingsPage.svelte
+++ b/apps/staged/src/lib/features/settings/SettingsPage.svelte
@@ -6,9 +6,12 @@
import DoctorSettingsPanel from './DoctorSettingsPanel.svelte';
import GeneralSettingsPanel from './GeneralSettingsPanel.svelte';
import KeyboardSettingsPanel from './KeyboardSettingsPanel.svelte';
- import { isTauri } from '../../transport';
+ import { isTauri, writeClipboardText } from '../../transport';
+ import * as commands from '../../commands';
let appVersion = $state(__APP_VERSION__);
+ let webToken = $state(null);
+ let tokenCopied = $state(false);
onMount(async () => {
if (!isTauri) return;
@@ -19,8 +22,21 @@
} catch (error) {
console.warn('[Settings] Could not load runtime app version', error);
}
+
+ try {
+ webToken = await commands.getWebAccessToken();
+ } catch {
+ // web server may not be running
+ }
});
+ async function copyToken() {
+ if (!webToken) return;
+ await writeClipboardText(webToken);
+ tokenCopied = true;
+ setTimeout(() => (tokenCopied = false), 2000);
+ }
+
function handleBack() {
closeSettings();
}
@@ -94,6 +110,18 @@
+
+ {#if webToken}
+