diff --git a/lib/storage.ts b/lib/storage.ts
index 1a6e0585..93154513 100644
--- a/lib/storage.ts
+++ b/lib/storage.ts
@@ -709,13 +709,17 @@ function getLegacyFlaggedAccountsPath(): string {
   );
 }
 
-async function migrateLegacyProjectStorageIfNeeded(
-  persist: (storage: AccountStorageV3) => Promise<void> = saveAccounts,
-): Promise<AccountStorageV3 | null> {
+async function migrateLegacyProjectStorageIfNeeded(options?: {
+  persist?: (storage: AccountStorageV3) => Promise<void>;
+  commit?: boolean;
+}): Promise<AccountStorageV3 | null> {
+  const persist = options?.persist ?? saveAccounts;
+  const commit = options?.commit ?? true;
   const state = getStoragePathState();
   if (!state.currentStoragePath) {
     return null;
   }
+  const currentStoragePath = state.currentStoragePath;
 
   const candidatePaths = [
     state.currentLegacyWorktreeStoragePath,
@@ -740,10 +744,8 @@ async function migrateLegacyProjectStorageIfNeeded(
     return null;
   }
 
-  let targetStorage = await loadNormalizedStorageFromPath(
-    state.currentStoragePath,
-    "current account storage",
-    {
+  const loadCurrentStorageForMigration = async (): Promise<AccountStorageV3 | null> =>
+    loadNormalizedStorageFromPath(currentStoragePath, "current account storage", {
       loadAccountsFromPath: (path) =>
         loadAccountsFromPath(path, {
           normalizeAccountStorage,
@@ -752,11 +754,50 @@ async function migrateLegacyProjectStorageIfNeeded(
       logWarn: (message, details) => {
         log.warn(message, details);
       },
-    },
-  );
+    });
+  const readLiveCurrentStorageIfExportMode = async (): Promise<{
+    exists: boolean;
+    storage: AccountStorageV3 | null;
+  }> => {
+    if (commit || !existsSync(currentStoragePath)) {
+      return { exists: false, storage: null };
+    }
+    try {
+      const { normalized, schemaErrors } = await loadAccountsFromPath(
+        currentStoragePath,
+        {
+          normalizeAccountStorage,
+          isRecord,
+        },
+      );
+      if (schemaErrors.length > 0) {
+        log.warn("current account storage schema validation warnings", {
+          path: currentStoragePath,
+          errors: schemaErrors.slice(0, 5),
+        });
+      }
+      return {
+        exists: true,
+        storage: normalized,
+      };
+    } catch (error) {
+
if ((error as NodeJS.ErrnoException).code === "ENOENT") { + return { exists: false, storage: null }; + } + throw error; + } + }; + + let targetStorage = await loadCurrentStorageForMigration(); let migrated = false; for (const legacyPath of existingCandidatePaths) { + const liveCurrentStorageBeforeMerge = + await readLiveCurrentStorageIfExportMode(); + if (liveCurrentStorageBeforeMerge.exists) { + return liveCurrentStorageBeforeMerge.storage; + } + const legacyStorage = await loadNormalizedStorageFromPath( legacyPath, "legacy account storage", @@ -775,6 +816,12 @@ async function migrateLegacyProjectStorageIfNeeded( continue; } + const liveCurrentStorageAfterLegacyRead = + await readLiveCurrentStorageIfExportMode(); + if (liveCurrentStorageAfterLegacyRead.exists) { + return liveCurrentStorageAfterLegacyRead.storage; + } + const mergedStorage = mergeStorageForMigration( targetStorage, legacyStorage, @@ -782,49 +829,55 @@ async function migrateLegacyProjectStorageIfNeeded( ); const fallbackStorage = targetStorage ?? 
legacyStorage; - try { - await persist(mergedStorage); - targetStorage = mergedStorage; - migrated = true; - } catch (error) { - targetStorage = fallbackStorage; - log.warn("Failed to persist migrated account storage", { + if (commit) { + try { + await persist(mergedStorage); + targetStorage = mergedStorage; + migrated = true; + } catch (error) { + targetStorage = fallbackStorage; + log.warn("Failed to persist migrated account storage", { + from: legacyPath, + to: currentStoragePath, + error: String(error), + }); + continue; + } + + try { + await fs.unlink(legacyPath); + log.info("Removed legacy account storage file after migration", { + path: legacyPath, + }); + } catch (unlinkError) { + const code = (unlinkError as NodeJS.ErrnoException).code; + if (code !== "ENOENT") { + log.warn( + "Failed to remove legacy account storage file after migration", + { + path: legacyPath, + error: String(unlinkError), + }, + ); + } + } + + log.info("Migrated legacy project account storage", { from: legacyPath, - to: state.currentStoragePath, - error: String(error), + to: currentStoragePath, + accounts: mergedStorage.accounts.length, }); continue; } - try { - await fs.unlink(legacyPath); - log.info("Removed legacy account storage file after migration", { - path: legacyPath, - }); - } catch (unlinkError) { - const code = (unlinkError as NodeJS.ErrnoException).code; - if (code !== "ENOENT") { - log.warn( - "Failed to remove legacy account storage file after migration", - { - path: legacyPath, - error: String(unlinkError), - }, - ); - } - } - - log.info("Migrated legacy project account storage", { - from: legacyPath, - to: state.currentStoragePath, - accounts: mergedStorage.accounts.length, - }); + targetStorage = mergedStorage; + migrated = true; } if (migrated) { return targetStorage; } - if (targetStorage && !existsSync(state.currentStoragePath)) { + if (targetStorage && !existsSync(currentStoragePath)) { return targetStorage; } return null; @@ -1263,7 +1316,7 @@ async function 
loadAccountsInternal(
   const resetMarkerPath = getIntentionalResetMarkerPath(path);
   await cleanupStaleRotatingBackupArtifacts(path);
   const migratedLegacyStorage = persistMigration
-    ? await migrateLegacyProjectStorageIfNeeded(persistMigration)
+    ? await migrateLegacyProjectStorageIfNeeded({ persist: persistMigration })
     : null;
 
   try {
@@ -1432,6 +1485,48 @@ async function loadAccountsInternal(
   }
 }
 
+async function loadAccountsForExport(): Promise<AccountStorageV3 | null> {
+  // Export reuses this helper from both paths in `exportAccounts()`. Keep the
+  // read side effect free so export never clears a reset marker or races with
+  // concurrent writers while normalizing legacy storage for the snapshot.
+  const path = getStoragePath();
+  const resetMarkerPath = getIntentionalResetMarkerPath(path);
+
+  if (existsSync(resetMarkerPath)) {
+    return createEmptyStorageWithMetadata(false, "intentional-reset");
+  }
+
+  try {
+    const { normalized, schemaErrors } = await loadAccountsFromPath(path, {
+      normalizeAccountStorage,
+      isRecord,
+    });
+    if (schemaErrors.length > 0) {
+      log.warn("Account storage schema validation warnings", {
+        errors: schemaErrors.slice(0, 5),
+      });
+    }
+    if (existsSync(resetMarkerPath)) {
+      return createEmptyStorageWithMetadata(false, "intentional-reset");
+    }
+    return normalized;
+  } catch (error) {
+    const code = (error as NodeJS.ErrnoException).code;
+    if (existsSync(resetMarkerPath)) {
+      return createEmptyStorageWithMetadata(false, "intentional-reset");
+    }
+    if (code === "ENOENT") {
+      const migratedLegacyStorage =
+        await migrateLegacyProjectStorageIfNeeded({ commit: false });
+      if (existsSync(resetMarkerPath)) {
+        return createEmptyStorageWithMetadata(false, "intentional-reset");
+      }
+      return migratedLegacyStorage;
+    }
+    throw error;
+  }
+}
+
 async function saveAccountsUnlocked(storage: AccountStorageV3): Promise<void> {
   const path = getStoragePath();
   const resetMarkerPath = getIntentionalResetMarkerPath(path);
@@ -1788,9 +1883,8 @@ export async function exportAccounts(
     force,
     currentStoragePath,
     transactionState: getTransactionSnapshotState(),
-    loadAccountsInternal: () => loadAccountsInternal(saveAccountsUnlocked),
-    readCurrentStorage: () =>
-      withAccountStorageTransaction((current) => Promise.resolve(current)),
+    readCurrentStorageUnlocked: () => loadAccountsForExport(),
+    readCurrentStorage: () => withStorageLock(() => loadAccountsForExport()),
     exportAccountsToFile,
     beforeCommit,
     logInfo: (message, details) => {
diff --git a/lib/storage/account-port.ts b/lib/storage/account-port.ts
index b14f29f5..814b1ce6 100644
--- a/lib/storage/account-port.ts
+++ b/lib/storage/account-port.ts
@@ -11,7 +11,7 @@ export async function exportAccountsSnapshot(params: {
     snapshot: AccountStorageV3 | null;
   } | undefined;
 
-  loadAccountsInternal: () => Promise<AccountStorageV3 | null>;
+  readCurrentStorageUnlocked: () => Promise<AccountStorageV3 | null>;
   readCurrentStorage: () => Promise<AccountStorageV3 | null>;
   exportAccountsToFile: (args: {
     resolvedPath: string;
@@ -28,7 +28,7 @@ export async function exportAccountsSnapshot(params: {
       params.transactionState.storagePath === params.currentStoragePath
         ? params.transactionState.snapshot
         : params.transactionState?.active
-          ? await params.loadAccountsInternal()
+          ?
await params.readCurrentStorageUnlocked() : await params.readCurrentStorage(); await params.exportAccountsToFile({ diff --git a/test/account-port.test.ts b/test/account-port.test.ts index 4dc3ece1..825fd390 100644 --- a/test/account-port.test.ts +++ b/test/account-port.test.ts @@ -5,8 +5,17 @@ import { } from "../lib/storage/account-port.js"; describe("account port helpers", () => { - it("exports transaction snapshot when active", async () => { + it("exports transaction snapshot when active for the current storage path", async () => { const exportAccountsToFile = vi.fn(async () => undefined); + const snapshot = { + version: 3 as const, + accounts: [{ refreshToken: "snapshot-token" }], + activeIndex: 0, + activeIndexByFamily: {}, + }; + const readCurrentStorageUnlocked = vi.fn(); + const readCurrentStorage = vi.fn(); + await exportAccountsSnapshot({ resolvedPath: "/tmp/out.json", force: true, @@ -14,19 +23,93 @@ describe("account port helpers", () => { transactionState: { active: true, storagePath: "/tmp/accounts.json", + snapshot, + }, + readCurrentStorageUnlocked, + readCurrentStorage, + exportAccountsToFile, + logInfo: vi.fn(), + }); + expect(readCurrentStorageUnlocked).not.toHaveBeenCalled(); + expect(readCurrentStorage).not.toHaveBeenCalled(); + expect(exportAccountsToFile).toHaveBeenCalledWith( + expect.objectContaining({ + storage: snapshot, + }), + ); + }); + + it("reads current storage without reusing a stale transaction snapshot from another path", async () => { + const exportAccountsToFile = vi.fn(async () => undefined); + const readCurrentStorageUnlocked = vi.fn(async () => ({ + version: 3 as const, + accounts: [{ refreshToken: "live-token" }], + activeIndex: 0, + activeIndexByFamily: {}, + })); + const readCurrentStorage = vi.fn(); + + await exportAccountsSnapshot({ + resolvedPath: "/tmp/out.json", + force: true, + currentStoragePath: "/tmp/accounts.json", + transactionState: { + active: true, + storagePath: "/tmp/other.json", snapshot: { version: 3, - 
accounts: [], + accounts: [{ refreshToken: "stale-token" }], activeIndex: 0, activeIndexByFamily: {}, }, }, - loadAccountsInternal: vi.fn(), - readCurrentStorage: vi.fn(), + readCurrentStorageUnlocked, + readCurrentStorage, exportAccountsToFile, logInfo: vi.fn(), }); - expect(exportAccountsToFile).toHaveBeenCalled(); + + expect(readCurrentStorageUnlocked).toHaveBeenCalledTimes(1); + expect(readCurrentStorage).not.toHaveBeenCalled(); + expect(exportAccountsToFile).toHaveBeenCalledWith( + expect.objectContaining({ + storage: expect.objectContaining({ + accounts: [{ refreshToken: "live-token" }], + }), + }), + ); + }); + + it("reads current storage via the locked reader when no transaction is active", async () => { + const exportAccountsToFile = vi.fn(async () => undefined); + const readCurrentStorageUnlocked = vi.fn(); + const readCurrentStorage = vi.fn(async () => ({ + version: 3 as const, + accounts: [{ refreshToken: "locked-read-token" }], + activeIndex: 0, + activeIndexByFamily: {}, + })); + + await exportAccountsSnapshot({ + resolvedPath: "/tmp/out.json", + force: true, + currentStoragePath: "/tmp/accounts.json", + transactionState: undefined, + readCurrentStorageUnlocked, + readCurrentStorage, + exportAccountsToFile, + logInfo: vi.fn(), + }); + + expect(readCurrentStorageUnlocked).not.toHaveBeenCalled(); + expect(readCurrentStorage).toHaveBeenCalledTimes(1); + expect(exportAccountsToFile).toHaveBeenCalledWith( + expect.objectContaining({ + storage: expect.objectContaining({ + accounts: [{ refreshToken: "locked-read-token" }], + }), + }), + ); }); it("imports through transaction helper and logs result", async () => { diff --git a/test/experimental-sync-target-entry.test.ts b/test/experimental-sync-target-entry.test.ts index cd4e4e22..41652fd3 100644 --- a/test/experimental-sync-target-entry.test.ts +++ b/test/experimental-sync-target-entry.test.ts @@ -40,9 +40,24 @@ describe("experimental sync target entry", () => { }); }); - it("wires windows-safe retry options 
through readJson", async () => { + it("provides windows lock retry policy to the injected reader", async () => { const sleep = vi.fn(async () => undefined); - const readFileWithRetry = vi.fn(async () => '{"hello":"world"}'); + const retryAttempts: string[] = []; + const readFileWithRetry = vi.fn(async (_path, options) => { + expect(options.retryableCodes.has("EBUSY")).toBe(true); + expect(options.retryableCodes.has("EPERM")).toBe(true); + expect(options.retryableCodes.has("EAGAIN")).toBe(true); + expect(options.retryableCodes.has("EACCES")).toBe(false); + expect(options.retryableCodes.has("ENOTEMPTY")).toBe(false); + expect(options.maxAttempts).toBe(4); + + while (retryAttempts.length < 2) { + retryAttempts.push("EBUSY"); + await options.sleep(25 * retryAttempts.length); + } + + return '{"hello":"world"}'; + }); const normalizeAccountStorage = vi.fn(() => null); let capturedReadJson: ((path: string) => Promise) | undefined; @@ -66,14 +81,45 @@ describe("experimental sync target entry", () => { }); expect(capturedReadJson).toBeDefined(); - expect(readFileWithRetry).toHaveBeenCalledWith("C:\\state.json", { - retryableCodes: new Set(["EBUSY", "EPERM", "EAGAIN", "ENOTEMPTY", "EACCES"]), - maxAttempts: 4, - sleep, - }); + expect(readFileWithRetry).toHaveBeenCalledTimes(1); + expect(sleep).toHaveBeenNthCalledWith(1, 25); + expect(sleep).toHaveBeenNthCalledWith(2, 50); expect(normalizeAccountStorage).toHaveBeenCalledWith({ hello: "world" }); }); + it("propagates fail-fast lock errors that are outside the retryable set", async () => { + const sleep = vi.fn(async () => undefined); + const readFileWithRetry = vi.fn(async (_path, options) => { + expect(options.retryableCodes.has("EACCES")).toBe(false); + expect(options.retryableCodes.has("ENOTEMPTY")).toBe(false); + throw Object.assign(new Error("denied"), { code: "EACCES" }); + }); + const normalizeAccountStorage = vi.fn(() => null); + + const loadExperimentalSyncTargetState = vi.fn(async (args) => { + await 
args.readJson("C:\\state.json"); + return { + kind: "target" as const, + detection: { kind: "target" as const }, + destination: null, + }; + }); + + await expect( + loadExperimentalSyncTargetEntry({ + loadExperimentalSyncTargetState, + detectTarget: createDetectedTarget, + readFileWithRetry, + normalizeAccountStorage, + sleep, + }), + ).rejects.toMatchObject({ code: "EACCES" }); + + expect(readFileWithRetry).toHaveBeenCalledTimes(1); + expect(sleep).not.toHaveBeenCalled(); + expect(normalizeAccountStorage).not.toHaveBeenCalled(); + }); + it("propagates malformed json parse failures to the caller", async () => { const readFileWithRetry = vi.fn(async () => "not-valid-json{{{"); const normalizeAccountStorage = vi.fn(() => null); diff --git a/test/storage.test.ts b/test/storage.test.ts index 4903d606..929a30b6 100644 --- a/test/storage.test.ts +++ b/test/storage.test.ts @@ -4,6 +4,8 @@ import { tmpdir } from "node:os"; import { dirname, join } from "node:path"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { getConfigDir, getProjectStorageKey } from "../lib/storage/paths.js"; +import { setStoragePathState } from "../lib/storage/path-state.js"; +import { getIntentionalResetMarkerPath } from "../lib/storage/backup-paths.js"; import { buildNamedBackupPath, clearAccounts, @@ -1233,7 +1235,6 @@ describe("storage", () => { }); it("should fail export when no accounts exist", async () => { - const { exportAccounts } = await import("../lib/storage.js"); setStoragePathDirect(testStoragePath); await clearAccounts(); await expect(exportAccounts(exportPath)).rejects.toThrow( @@ -1297,6 +1298,657 @@ describe("storage", () => { } }); + it("exports legacy-migrated storage without persisting it during another storage transaction", async () => { + const transactionStoragePath = join(testWorkDir, "accounts-transaction.json"); + const currentStoragePath = join(testWorkDir, "accounts-current.json"); + const legacyStoragePath = join(testWorkDir, 
"accounts-legacy.json"); + await fs.writeFile( + transactionStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "transaction", + refreshToken: "transaction-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(transactionStoragePath); + try { + await withAccountStorageTransaction(async () => { + setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + await exportAccounts(exportPath); + }); + + const exported = JSON.parse(await fs.readFile(exportPath, "utf-8")); + const transactionStorage = JSON.parse( + await fs.readFile(transactionStoragePath, "utf-8"), + ); + expect(exported.accounts).toEqual([ + expect.objectContaining({ refreshToken: "legacy-token" }), + ]); + expect(transactionStorage.accounts).toEqual([ + expect.objectContaining({ refreshToken: "transaction-token" }), + ]); + expect(existsSync(currentStoragePath)).toBe(false); + expect(existsSync(legacyStoragePath)).toBe(true); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + + it("does not persist v3 normalization while export reads storage unlocked", async () => { + const transactionStoragePath = join(testWorkDir, "accounts-transaction.json"); + const currentStoragePath = join(testWorkDir, "accounts-v1.json"); + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 1, + activeIndex: 0, + accounts: [ + { + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(transactionStoragePath); + try { + await withAccountStorageTransaction(async () => { + setStoragePathState({ + 
currentStoragePath, + currentLegacyProjectStoragePath: null, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + await exportAccounts(exportPath); + }); + + const onDisk = JSON.parse( + await fs.readFile(currentStoragePath, "utf-8"), + ); + const exported = JSON.parse(await fs.readFile(exportPath, "utf-8")); + expect(onDisk.version).toBe(1); + expect(exported.version).toBe(3); + expect(exported.accounts).toEqual([ + expect.objectContaining({ refreshToken: "legacy-token" }), + ]); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + + it("does not persist v3 normalization while export reads storage with the lock", async () => { + const currentStoragePath = join(testWorkDir, "accounts-v1-locked.json"); + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 1, + activeIndex: 0, + accounts: [ + { + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(currentStoragePath); + try { + await exportAccounts(exportPath); + + const onDisk = JSON.parse( + await fs.readFile(currentStoragePath, "utf-8"), + ); + const exported = JSON.parse(await fs.readFile(exportPath, "utf-8")); + expect(onDisk.version).toBe(1); + expect(exported.version).toBe(3); + expect(exported.accounts).toEqual([ + expect.objectContaining({ refreshToken: "legacy-token" }), + ]); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + + it.each(["EBUSY", "EPERM", "EAGAIN"] as const)( + "rethrows %s when export cannot read the current storage file", + async (code) => { + const lockedStoragePath = join(testWorkDir, `accounts-${code}.json`); + await fs.writeFile( + lockedStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "locked", + refreshToken: "locked-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const actualStorageParser = await vi.importActual< + typeof import("../lib/storage/storage-parser.js") + 
>("../lib/storage/storage-parser.js"); + vi.resetModules(); + vi.doMock("../lib/storage/storage-parser.js", () => ({ + ...actualStorageParser, + loadAccountsFromPath: vi.fn(async (path, deps) => { + if (path === lockedStoragePath) { + throw Object.assign(new Error(`locked ${code}`), { code }); + } + return actualStorageParser.loadAccountsFromPath(path, deps); + }), + })); + + try { + const isolatedStorageModule = await import("../lib/storage.js"); + isolatedStorageModule.setStoragePathDirect(lockedStoragePath); + await expect( + isolatedStorageModule.exportAccounts(exportPath), + ).rejects.toMatchObject({ code }); + } finally { + vi.doUnmock("../lib/storage/storage-parser.js"); + vi.resetModules(); + setStoragePathDirect(testStoragePath); + } + }, + ); + + it.each(["EBUSY", "EPERM", "EAGAIN"] as const)( + "does not write an export file when %s happens while reading another storage path during a transaction", + async (code) => { + const transactionStoragePath = join( + testWorkDir, + `accounts-transaction-${code}.json`, + ); + const currentStoragePath = join(testWorkDir, `accounts-live-${code}.json`); + await fs.writeFile( + transactionStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "transaction", + refreshToken: "transaction-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "live", + refreshToken: "live-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const actualStorageParser = await vi.importActual< + typeof import("../lib/storage/storage-parser.js") + >("../lib/storage/storage-parser.js"); + vi.resetModules(); + vi.doMock("../lib/storage/storage-parser.js", () => ({ + ...actualStorageParser, + loadAccountsFromPath: vi.fn(async (path, deps) => { + if (path === currentStoragePath) { + throw Object.assign(new 
Error(`locked ${code}`), { code }); + } + return actualStorageParser.loadAccountsFromPath(path, deps); + }), + })); + + try { + const isolatedStorageModule = await import("../lib/storage.js"); + const isolatedPathState = await import("../lib/storage/path-state.js"); + isolatedStorageModule.setStoragePathDirect(transactionStoragePath); + await expect( + isolatedStorageModule.withAccountStorageTransaction(async () => { + isolatedPathState.setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: null, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + await isolatedStorageModule.exportAccounts(exportPath); + }), + ).rejects.toMatchObject({ code }); + + const transactionStorage = JSON.parse( + await fs.readFile(transactionStoragePath, "utf-8"), + ); + expect(transactionStorage.accounts).toEqual([ + expect.objectContaining({ refreshToken: "transaction-token" }), + ]); + expect(existsSync(exportPath)).toBe(false); + } finally { + vi.doUnmock("../lib/storage/storage-parser.js"); + vi.resetModules(); + setStoragePathDirect(testStoragePath); + } + }, + ); + + it("does not revive legacy accounts when the current storage exists but is empty", async () => { + const currentStoragePath = join(testWorkDir, "accounts-empty-current.json"); + const legacyStoragePath = join(testWorkDir, "accounts-empty-legacy.json"); + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }), + ); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(currentStoragePath); + try { + setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + 
await expect(exportAccounts(exportPath)).rejects.toThrow( + /No accounts to export/, + ); + + const currentStorage = JSON.parse( + await fs.readFile(currentStoragePath, "utf-8"), + ); + expect(currentStorage.accounts).toEqual([]); + expect(existsSync(legacyStoragePath)).toBe(true); + expect(existsSync(exportPath)).toBe(false); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + + it("exports legacy storage without persisting it when current storage is missing", async () => { + const currentStoragePath = join(testWorkDir, "accounts-missing-current.json"); + const legacyStoragePath = join(testWorkDir, "accounts-missing-legacy.json"); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(currentStoragePath); + try { + setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + await exportAccounts(exportPath); + + const exported = JSON.parse(await fs.readFile(exportPath, "utf-8")); + expect(exported.accounts).toEqual([ + expect.objectContaining({ refreshToken: "legacy-token" }), + ]); + expect(existsSync(currentStoragePath)).toBe(false); + expect(existsSync(legacyStoragePath)).toBe(true); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + + it("does not export legacy accounts when an intentional reset marker appears during export fallback migration", async () => { + const currentStoragePath = join( + testWorkDir, + "accounts-reset-during-fallback-current.json", + ); + const legacyStoragePath = join( + testWorkDir, + "accounts-reset-during-fallback-legacy.json", + ); + const resetMarkerPath = + getIntentionalResetMarkerPath(currentStoragePath); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + 
version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const actualStorageParser = await vi.importActual< + typeof import("../lib/storage/storage-parser.js") + >("../lib/storage/storage-parser.js"); + vi.resetModules(); + vi.doMock("../lib/storage/storage-parser.js", () => ({ + ...actualStorageParser, + loadAccountsFromPath: vi.fn(async (path, deps) => { + if (path === legacyStoragePath && !existsSync(resetMarkerPath)) { + await fs.writeFile(resetMarkerPath, ""); + } + return actualStorageParser.loadAccountsFromPath(path, deps); + }), + })); + + try { + const isolatedStorageModule = await import("../lib/storage.js"); + const isolatedPathState = await import("../lib/storage/path-state.js"); + isolatedPathState.setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + await expect( + isolatedStorageModule.exportAccounts(exportPath), + ).rejects.toThrow(/No accounts to export/); + + expect(existsSync(currentStoragePath)).toBe(false); + expect(existsSync(resetMarkerPath)).toBe(true); + expect(existsSync(legacyStoragePath)).toBe(true); + expect(existsSync(exportPath)).toBe(false); + } finally { + vi.doUnmock("../lib/storage/storage-parser.js"); + vi.resetModules(); + setStoragePathDirect(testStoragePath); + } + }); + + it("does not revive legacy accounts when the current storage reappears before export merges legacy storage", async () => { + const currentStoragePath = join( + testWorkDir, + "accounts-reappeared-current.json", + ); + const legacyStoragePath = join( + testWorkDir, + "accounts-reappeared-legacy.json", + ); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + 
lastUsed: 1, + }, + ], + }), + ); + + const actualStorageParser = await vi.importActual< + typeof import("../lib/storage/storage-parser.js") + >("../lib/storage/storage-parser.js"); + let recreateCurrentStorage = true; + vi.resetModules(); + vi.doMock("../lib/storage/storage-parser.js", () => ({ + ...actualStorageParser, + loadAccountsFromPath: vi.fn(async (path, deps) => { + if (path === currentStoragePath && recreateCurrentStorage) { + recreateCurrentStorage = false; + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }), + ); + throw Object.assign(new Error("missing current storage"), { + code: "ENOENT", + }); + } + return actualStorageParser.loadAccountsFromPath(path, deps); + }), + })); + + try { + const isolatedStorageModule = await import("../lib/storage.js"); + const isolatedPathState = await import("../lib/storage/path-state.js"); + isolatedPathState.setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + await expect( + isolatedStorageModule.exportAccounts(exportPath), + ).rejects.toThrow(/No accounts to export/); + + const currentStorage = JSON.parse( + await fs.readFile(currentStoragePath, "utf-8"), + ); + expect(currentStorage.accounts).toEqual([]); + expect(existsSync(legacyStoragePath)).toBe(true); + expect(existsSync(exportPath)).toBe(false); + } finally { + vi.doUnmock("../lib/storage/storage-parser.js"); + vi.resetModules(); + setStoragePathDirect(testStoragePath); + } + }); + + it.each(["EBUSY", "EPERM", "EAGAIN"] as const)( + "rethrows %s when the current storage reappears locked during export fallback", + async (code) => { + const currentStoragePath = join( + testWorkDir, + `accounts-reappeared-locked-${code}.json`, + ); + const legacyStoragePath = join( + testWorkDir, + `accounts-reappeared-legacy-${code}.json`, + ); + await 
fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + const actualStorageParser = await vi.importActual< + typeof import("../lib/storage/storage-parser.js") + >("../lib/storage/storage-parser.js"); + let currentReadCount = 0; + vi.resetModules(); + vi.doMock("../lib/storage/storage-parser.js", () => ({ + ...actualStorageParser, + loadAccountsFromPath: vi.fn(async (path, deps) => { + if (path === currentStoragePath) { + currentReadCount += 1; + if (currentReadCount === 1) { + await fs.writeFile( + currentStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [], + }), + ); + throw Object.assign( + new Error("missing current storage"), + { code: "ENOENT" }, + ); + } + throw Object.assign(new Error(`locked ${code}`), { code }); + } + return actualStorageParser.loadAccountsFromPath(path, deps); + }), + })); + + try { + const isolatedStorageModule = await import("../lib/storage.js"); + const isolatedPathState = await import("../lib/storage/path-state.js"); + isolatedPathState.setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + await expect( + isolatedStorageModule.exportAccounts(exportPath), + ).rejects.toMatchObject({ code }); + + const currentStorage = JSON.parse( + await fs.readFile(currentStoragePath, "utf-8"), + ); + expect(currentStorage.accounts).toEqual([]); + expect(existsSync(legacyStoragePath)).toBe(true); + expect(existsSync(exportPath)).toBe(false); + } finally { + vi.doUnmock("../lib/storage/storage-parser.js"); + vi.resetModules(); + setStoragePathDirect(testStoragePath); + } + }, + ); + + it("does not revive legacy accounts when the current storage has an intentional reset marker", async () => { + 
const currentStoragePath = join(testWorkDir, "accounts-reset-current.json"); + const legacyStoragePath = join(testWorkDir, "accounts-reset-legacy.json"); + await fs.writeFile( + legacyStoragePath, + JSON.stringify({ + version: 3, + activeIndex: 0, + activeIndexByFamily: {}, + accounts: [ + { + accountId: "legacy", + refreshToken: "legacy-token", + addedAt: 1, + lastUsed: 1, + }, + ], + }), + ); + + setStoragePathDirect(currentStoragePath); + await clearAccounts(); + try { + setStoragePathState({ + currentStoragePath, + currentLegacyProjectStoragePath: legacyStoragePath, + currentLegacyWorktreeStoragePath: null, + currentProjectRoot: null, + }); + + await expect(exportAccounts(exportPath)).rejects.toThrow( + /No accounts to export/, + ); + + expect(existsSync(currentStoragePath)).toBe(false); + expect(existsSync(legacyStoragePath)).toBe(true); + expect(existsSync(exportPath)).toBe(false); + } finally { + setStoragePathDirect(testStoragePath); + } + }); + it("should fail import when file does not exist", async () => { const { importAccounts } = await import("../lib/storage.js"); const nonexistentPath = join(testWorkDir, "nonexistent-file.json");